diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 18795917ffb4..a695d96468bb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -11,7 +11,7 @@ "packages/google-area120-tables": "0.11.11", "packages/google-cloud-access-approval": "1.13.5", "packages/google-cloud-advisorynotifications": "0.3.10", - "packages/google-cloud-alloydb": "0.3.12", + "packages/google-cloud-alloydb": "0.3.13", "packages/google-cloud-alloydb-connectors": "0.1.6", "packages/google-cloud-api-gateway": "1.9.5", "packages/google-cloud-api-keys": "0.5.11", @@ -27,7 +27,7 @@ "packages/google-cloud-automl": "2.13.5", "packages/google-cloud-backupdr": "0.1.3", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.26", + "packages/google-cloud-batch": "0.17.27", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -38,7 +38,7 @@ "packages/google-cloud-bigquery-connection": "1.15.5", "packages/google-cloud-bigquery-data-exchange": "0.5.13", "packages/google-cloud-bigquery-datapolicies": "0.6.8", - "packages/google-cloud-bigquery-datatransfer": "3.15.6", + "packages/google-cloud-bigquery-datatransfer": "3.15.7", "packages/google-cloud-bigquery-logging": "1.4.5", "packages/google-cloud-bigquery-migration": "0.11.9", "packages/google-cloud-bigquery-reservation": "1.13.5", @@ -56,7 +56,7 @@ "packages/google-cloud-confidentialcomputing": "0.4.11", "packages/google-cloud-config": "0.1.11", "packages/google-cloud-contact-center-insights": "1.17.5", - "packages/google-cloud-container": "2.50.0", + "packages/google-cloud-container": "2.51.0", "packages/google-cloud-containeranalysis": "2.14.5", "packages/google-cloud-contentwarehouse": "0.7.9", "packages/google-cloud-data-fusion": "1.10.5", @@ -67,20 +67,20 @@ "packages/google-cloud-dataform": "0.5.11", 
"packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.10.2", + "packages/google-cloud-dataproc": "5.11.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", "packages/google-cloud-deploy": "2.0.1", "packages/google-cloud-developerconnect": "0.1.2", "packages/google-cloud-dialogflow": "2.31.0", "packages/google-cloud-dialogflow-cx": "1.35.0", - "packages/google-cloud-discoveryengine": "0.12.1", + "packages/google-cloud-discoveryengine": "0.12.2", "packages/google-cloud-dlp": "3.22.0", "packages/google-cloud-dms": "1.9.5", "packages/google-cloud-documentai": "2.31.0", "packages/google-cloud-domains": "1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", - "packages/google-cloud-edgenetwork": "0.1.10", + "packages/google-cloud-edgenetwork": "0.1.11", "packages/google-cloud-enterpriseknowledgegraph": "0.3.11", "packages/google-cloud-essential-contacts": "1.7.5", "packages/google-cloud-eventarc": "1.11.5", @@ -89,7 +89,7 @@ "packages/google-cloud-functions": "1.17.0", "packages/google-cloud-gdchardwaremanagement": "0.1.3", "packages/google-cloud-gke-backup": "0.5.11", - "packages/google-cloud-gke-connect-gateway": "0.8.11", + "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", "packages/google-cloud-gke-multicloud": "0.6.12", "packages/google-cloud-gsuiteaddons": "0.3.10", @@ -129,7 +129,7 @@ "packages/google-cloud-privilegedaccessmanager": "0.1.1", "packages/google-cloud-public-ca": "0.3.12", "packages/google-cloud-rapidmigrationassessment": "0.1.9", - "packages/google-cloud-recaptcha-enterprise": "1.21.2", + "packages/google-cloud-recaptcha-enterprise": "1.22.1", "packages/google-cloud-recommendations-ai": "0.10.12", "packages/google-cloud-recommender": "2.15.5", "packages/google-cloud-redis": "2.15.5", @@ -145,7 +145,7 @@ "packages/google-cloud-securitycentermanagement": "0.1.14", 
"packages/google-cloud-service-control": "1.12.3", "packages/google-cloud-service-directory": "1.11.6", - "packages/google-cloud-service-management": "1.8.5", + "packages/google-cloud-service-management": "1.9.0", "packages/google-cloud-service-usage": "1.10.5", "packages/google-cloud-servicehealth": "0.1.6", "packages/google-cloud-shell": "1.9.5", @@ -158,7 +158,7 @@ "packages/google-cloud-talent": "2.13.5", "packages/google-cloud-tasks": "2.16.5", "packages/google-cloud-telcoautomation": "0.2.5", - "packages/google-cloud-texttospeech": "2.17.1", + "packages/google-cloud-texttospeech": "2.17.2", "packages/google-cloud-tpu": "1.18.5", "packages/google-cloud-trace": "1.13.5", "packages/google-cloud-translate": "3.16.0", @@ -167,7 +167,7 @@ "packages/google-cloud-video-transcoder": "1.12.5", "packages/google-cloud-videointelligence": "2.13.5", "packages/google-cloud-vision": "3.7.4", - "packages/google-cloud-visionai": "0.1.2", + "packages/google-cloud-visionai": "0.1.3", "packages/google-cloud-vm-migration": "1.8.5", "packages/google-cloud-vmwareengine": "1.5.0", "packages/google-cloud-vpc-access": "1.10.5", @@ -184,7 +184,7 @@ "packages/google-maps-routeoptimization": "0.1.2", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", - "packages/google-shopping-css": "0.1.7", + "packages/google-shopping-css": "0.1.8", "packages/google-shopping-merchant-accounts": "0.1.3", "packages/google-shopping-merchant-conversions": "0.1.3", "packages/google-shopping-merchant-datasources": "0.1.2", diff --git a/CHANGELOG.md b/CHANGELOG.md index 2696012f56a1..9f4e47fb21b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,7 @@ Changelogs - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - 
[google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.25](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.26](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -41,7 +41,7 @@ Changelogs - [google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.15.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - 
[google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) @@ -92,7 +92,7 @@ Changelogs - [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) - [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.8.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) - [google-cloud-gke-multicloud==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) @@ -132,7 +132,7 @@ Changelogs - [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) - [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - 
[google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.21.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) @@ -161,7 +161,7 @@ Changelogs - [google-cloud-talent==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) - [google-cloud-tasks==2.16.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) - [google-cloud-telcoautomation==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-texttospeech==2.17.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) - [google-cloud-tpu==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) - [google-cloud-trace==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) - 
[google-cloud-translate==3.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) @@ -187,7 +187,7 @@ Changelogs - [google-maps-routeoptimization==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - [google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) - [google-shopping-merchant-accounts==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) - [google-shopping-merchant-conversions==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) - [google-shopping-merchant-datasources==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index c2be3ab22ace..7545a07df323 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.12...google-cloud-alloydb-v0.3.13) (2024-09-03) + + +### Features + +* support for enabling outbound public IP on an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting maintenance 
schedule of a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for getting outbound public IP addresses of an instance ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) +* support for setting maintenance update policy on a cluster ([4f468fa](https://github.com/googleapis/google-cloud-python/commit/4f468fa598c51426ef31ef878f9c3b61f79802f9)) + ## [0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.3.11...google-cloud-alloydb-v0.3.12) (2024-07-30) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py index 235d25432420..eab98a4fe3ff 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -1283,7 +1283,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index e4af7a8c95de..22d401ff11cc 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -1344,7 +1344,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. 
+ considered valid. query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py index 648260737ae8..209ee9e5b9ce 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -35,6 +35,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -142,6 +144,8 @@ "ListSupportedDatabaseFlagsResponse", "ListUsersRequest", "ListUsersResponse", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "OperationMetadata", "PromoteClusterRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 558c8aab67c5..fb3463bbb3c2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py index 69269be33581..262b798ccfc1 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -29,6 +29,8 @@ EncryptionInfo, Instance, InstanceView, + MaintenanceSchedule, + MaintenanceUpdatePolicy, MigrationSource, SslConfig, SupportedDatabaseFlag, @@ -95,6 +97,8 @@ "EncryptionConfig", "EncryptionInfo", "Instance", + "MaintenanceSchedule", + "MaintenanceUpdatePolicy", "MigrationSource", "SslConfig", "SupportedDatabaseFlag", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index fd99ddd2630f..9cf140f311a4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -40,6 +40,8 @@ "ContinuousBackupInfo", "BackupSource", "ContinuousBackupSource", + "MaintenanceUpdatePolicy", + "MaintenanceSchedule", "Cluster", "Instance", "ConnectionInfo", @@ -622,6 +624,69 @@ class ContinuousBackupSource(proto.Message): ) +class MaintenanceUpdatePolicy(proto.Message): + r"""MaintenanceUpdatePolicy defines the policy for system + updates. + + Attributes: + maintenance_windows (MutableSequence[google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy.MaintenanceWindow]): + Preferred windows to perform maintenance. + Currently limited to 1. + """ + + class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow specifies a preferred day and time for + maintenance. 
+ + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Preferred day of the week for maintenance, + e.g. MONDAY, TUESDAY, etc. + start_time (google.type.timeofday_pb2.TimeOfDay): + Preferred time to start the maintenance + operation on the specified day. Maintenance will + start within 1 hour of this time. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + + maintenance_windows: MutableSequence[MaintenanceWindow] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=MaintenanceWindow, + ) + + +class MaintenanceSchedule(proto.Message): + r"""MaintenanceSchedule stores the maintenance schedule generated + from the MaintenanceUpdatePolicy, once a maintenance rollout is + triggered, if MaintenanceWindow is set, and if there is no + conflicting DenyPeriod. The schedule is cleared once the update + takes place. This field cannot be manually changed; modify the + MaintenanceUpdatePolicy instead. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The scheduled start time for the + maintenance. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class Cluster(proto.Message): r"""A cluster is a collection of regional AlloyDB resources. It can include a primary instance and one or more read pool @@ -693,7 +758,7 @@ class Cluster(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project}/global/networks/{network_id}". This is + ``projects/{project}/global/networks/{network_id}``. This is required to create a cluster. Deprecated, use network_config.network instead. 
etag (str): @@ -752,6 +817,13 @@ class Cluster(proto.Message): specific to PRIMARY cluster. satisfies_pzs (bool): Output only. Reserved for future use. + maintenance_update_policy (google.cloud.alloydb_v1beta.types.MaintenanceUpdatePolicy): + Optional. The maintenance update policy + determines when to allow or deny updates. + maintenance_schedule (google.cloud.alloydb_v1beta.types.MaintenanceSchedule): + Output only. The maintenance schedule for the + cluster, generated for a specific rollout if a + maintenance window is set. """ class State(proto.Enum): @@ -830,7 +902,7 @@ class NetworkConfig(proto.Message): cluster resources are created and from which they are accessible via Private IP. The network must belong to the same project as the cluster. It is specified in the form: - "projects/{project_number}/global/networks/{network_id}". + ``projects/{project_number}/global/networks/{network_id}``. This is required to create a cluster. allocated_ip_range (str): Optional. Name of the allocated IP range for the private IP @@ -1014,6 +1086,16 @@ class PrimaryConfig(proto.Message): proto.BOOL, number=30, ) + maintenance_update_policy: "MaintenanceUpdatePolicy" = proto.Field( + proto.MESSAGE, + number=32, + message="MaintenanceUpdatePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=37, + message="MaintenanceSchedule", + ) class Instance(proto.Message): @@ -1147,6 +1229,9 @@ class Instance(proto.Message): network_config (google.cloud.alloydb_v1beta.types.Instance.InstanceNetworkConfig): Optional. Instance level network configuration. + outbound_public_ip_addresses (MutableSequence[str]): + Output only. All outbound public IP addresses + configured for the instance. """ class State(proto.Enum): @@ -1312,7 +1397,7 @@ class QueryInsightsInstanceConfig(proto.Message): query_string_length (int): Query string length. The default value is 1024. Any integer between 256 and 4500 is - considered valid. + considered valid. 
query_plans_per_minute (int): Number of query execution plans captured by Insights per minute for all queries combined. @@ -1514,6 +1599,10 @@ class InstanceNetworkConfig(proto.Message): enable_public_ip (bool): Optional. Enabling public ip for the instance. + enable_outbound_public_ip (bool): + Optional. Enabling an outbound public IP + address to support a database server sending + requests out into the internet. """ class AuthorizedNetwork(proto.Message): @@ -1542,6 +1631,10 @@ class AuthorizedNetwork(proto.Message): proto.BOOL, number=2, ) + enable_outbound_public_ip: bool = proto.Field( + proto.BOOL, + number=3, + ) name: str = proto.Field( proto.STRING, @@ -1669,6 +1762,10 @@ class AuthorizedNetwork(proto.Message): number=29, message=InstanceNetworkConfig, ) + outbound_public_ip_addresses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=34, + ) class ConnectionInfo(proto.Message): diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index 1cb197a5b3b4..70a94ca97046 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index d70e655d280d..1ed9939262fa 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ 
b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index dc19bd4f2a54..8aac8040c177 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.1.0" + "version": "0.3.13" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index 510dd441d48d..ab12494b9268 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -4942,6 +4942,7 @@ def test_get_instance(request_type, transport: str = "grpc"): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) response = client.get_instance(request) @@ -4965,6 +4966,9 @@ def test_get_instance(request_type, transport: str = "grpc"): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_empty_call(): @@ -5075,6 +5079,7 @@ async def test_get_instance_empty_call_async(): reconciling=True, etag="etag_value", satisfies_pzs=True, + 
outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance() @@ -5155,6 +5160,7 @@ async def test_get_instance_async( reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) ) response = await client.get_instance(request) @@ -5179,6 +5185,9 @@ async def test_get_instance_async( assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] @pytest.mark.asyncio @@ -15285,6 +15294,10 @@ def test_create_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -15766,6 +15779,10 @@ def test_update_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -17093,6 +17110,10 @@ def test_create_secondary_cluster_rest(request_type): ] }, "satisfies_pzs": True, + "maintenance_update_policy": { + "maintenance_windows": [{"day": 1, "start_time": {}}] + }, + "maintenance_schedule": {"start_time": {}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17909,6 +17930,7 @@ def test_get_instance_rest(request_type): reconciling=True, etag="etag_value", satisfies_pzs=True, + outbound_public_ip_addresses=["outbound_public_ip_addresses_value"], ) # Wrap the value into a proper Response obj @@ -17936,6 +17958,9 @@ def test_get_instance_rest(request_type): assert response.reconciling is True assert response.etag == "etag_value" assert response.satisfies_pzs is True + assert response.outbound_public_ip_addresses == [ + "outbound_public_ip_addresses_value" + ] def test_get_instance_rest_use_cached_wrapped_rpc(): @@ -18283,7 +18308,12 @@ def test_create_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -18759,7 +18789,12 @@ def test_create_secondary_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -19249,7 +19284,12 @@ def test_batch_create_instances_rest(request_type): {"cidr_range": "cidr_range_value"} ], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], }, "request_id": "request_id_value", "validate_only": True, @@ -19654,7 +19694,12 @@ def test_update_instance_rest(request_type): "network_config": { "authorized_external_networks": [{"cidr_range": "cidr_range_value"}], "enable_public_ip": True, + "enable_outbound_public_ip": True, }, + "outbound_public_ip_addresses": [ + "outbound_public_ip_addresses_value1", + "outbound_public_ip_addresses_value2", + ], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index ef34a533cfa8..addee943a3ea 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) + + +### Features + +* **v1:** promote block_project_ssh_keys support to batch v1 API ([63a6de0](https://github.com/googleapis/google-cloud-python/commit/63a6de00b1c6e2b6289b4fa76468859c828cb363)) + ## [0.17.26](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.25...google-cloud-batch-v0.17.26) (2024-08-20) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index e4b992ee0cd9..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ 
b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index e4b992ee0cd9..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 47abe1466463..c11a34f16b56 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -810,6 +810,26 @@ class InstancePolicyOrTemplate(proto.Message): Optional. Set this field true if you want Batch to install Ops Agent on your behalf. Default is false. + block_project_ssh_keys (bool): + Optional. Set this field to ``true`` if you want Batch to + block project-level SSH keys from accessing this job's VMs. + Alternatively, you can configure the job to specify a VM + instance template that blocks project-level SSH keys. In + either case, Batch blocks project-level SSH keys while + creating the VMs for this job. + + Batch allows project-level SSH keys for a job's VMs only if + all the following are true: + + - This field is undefined or set to ``false``. + - The job's VM instance template (if any) doesn't block + project-level SSH keys. 
+ + Notably, you can override this behavior by manually updating + a VM to block or allow project-level SSH keys. For more + information about blocking project-level SSH keys, see the + Compute Engine documentation: + https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys """ policy: "AllocationPolicy.InstancePolicy" = proto.Field( @@ -831,6 +851,10 @@ class InstancePolicyOrTemplate(proto.Message): proto.BOOL, number=4, ) + block_project_ssh_keys: bool = proto.Field( + proto.BOOL, + number=5, + ) class NetworkInterface(proto.Message): r"""A network interface. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index e4b992ee0cd9..7de8a6a6838d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.26" # {x-release-please-version} +__version__ = "0.17.27" # {x-release-please-version} diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 32f12c161ccb..1a9ad7a0b658 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.26" + "version": "0.17.27" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 889a0b1efe83..9c3638c4d767 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.26" + "version": "0.17.27" }, "snippets": [ { diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index 3a3cc5d39a91..d123a2803997 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -3867,6 +3867,7 @@ def test_create_job_rest(request_type): "instance_template": "instance_template_value", "install_gpu_drivers": True, "install_ops_agent": True, + "block_project_ssh_keys": True, } ], "service_account": { diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md 
b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 649945d0a61f..953e701b7f5e 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.15.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.6...google-cloud-bigquery-datatransfer-v3.15.7) (2024-09-04) + + +### Documentation + +* [google-cloud-bigquery-datatransfer] add a note to the CreateTransferConfigRequest and UpdateTransferConfigRequest to disable restricting service account usage ([#13051](https://github.com/googleapis/google-cloud-python/issues/13051)) ([4136c10](https://github.com/googleapis/google-cloud-python/commit/4136c10fabc1df012b028a5d407aaec326e448b6)) + ## [3.15.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.5...google-cloud-bigquery-datatransfer-v3.15.6) (2024-08-20) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 769a9d92f8cf..fc64b41dd679 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.6" # {x-release-please-version} +__version__ = "3.15.7" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 769a9d92f8cf..fc64b41dd679 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.6" # {x-release-please-version} +__version__ = "3.15.7" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 87ab9e5d50f5..ea32882077da 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -554,17 +554,20 @@ async def sample_create_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]]): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. 
A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (:class:`str`): Required. The BigQuery project id where the transfer configuration should be created. Must be in the format @@ -693,10 +696,15 @@ async def sample_update_transfer_config(): Args: request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): Required. Data transfer configuration to create. 
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 7bf64c346481..b5032cd300f4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -1002,17 +1002,20 @@ def sample_create_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. + The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. parent (str): Required. The BigQuery project id where the transfer configuration should be created. 
Must be in the format @@ -1138,10 +1141,15 @@ def sample_update_transfer_config(): Args: request (Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): Required. Data transfer configuration to create. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py index cd9e0af0c4d2..91347f2be728 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -789,17 +789,20 @@ def __call__( Args: request (~.datatransfer.CreateTransferConfigRequest): - The request object. A request to create a data transfer - configuration. If new credentials are - needed for this transfer configuration, - authorization info must be provided. If - authorization info is provided, the - transfer configuration will be - associated with the user id - corresponding to the authorization info. - Otherwise, the transfer configuration - will be associated with the calling - user. 
+ The request object. A request to create a data transfer configuration. If + new credentials are needed for this transfer + configuration, authorization info must be provided. If + authorization info is provided, the transfer + configuration will be associated with the user id + corresponding to the authorization info. Otherwise, the + transfer configuration will be associated with the + calling user. + + When using a cross project service account for creating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2104,10 +2107,15 @@ def __call__( Args: request (~.datatransfer.UpdateTransferConfigRequest): - The request object. A request to update a transfer - configuration. To update the user id of - the transfer configuration, - authorization info needs to be provided. + The request object. A request to update a transfer configuration. To update + the user id of the transfer configuration, authorization + info needs to be provided. + + When using a cross project service account for updating + a transfer config, you must enable cross project service + account usage. For more information, see `Disable + attachment of service accounts to resources in other + projects `__. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 30e607bd0c09..e0319b78c3e9 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -480,6 +480,12 @@ class CreateTransferConfigRequest(proto.Message): user id corresponding to the authorization info. Otherwise, the transfer configuration will be associated with the calling user. + When using a cross project service account for creating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. + Attributes: parent (str): Required. The BigQuery project id where the transfer @@ -577,9 +583,15 @@ class CreateTransferConfigRequest(proto.Message): class UpdateTransferConfigRequest(proto.Message): - r"""A request to update a transfer configuration. To update the - user id of the transfer configuration, authorization info needs - to be provided. + r"""A request to update a transfer configuration. To update the user id + of the transfer configuration, authorization info needs to be + provided. + + When using a cross project service account for updating a transfer + config, you must enable cross project service account usage. For + more information, see `Disable attachment of service accounts to + resources in other + projects `__. 
Attributes: transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index 1fbf5757f11d..da58d7e46817 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.6" + "version": "3.15.7" }, "snippets": [ { diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index d939cc42c90a..c39b8c48144d 100644 --- a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.51.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.50.0...google-cloud-container-v2.51.0) (2024-09-03) + + +### Features + +* add `EXTENDED` enum value for `ReleaseChannel.Channel` ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) +* add ReleaseChannel EXTENDED value ([ea71725](https://github.com/googleapis/google-cloud-python/commit/ea71725d3fe3bde0afd775d20127bed958e8eb8e)) + ## [2.50.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.49.0...google-cloud-container-v2.50.0) (2024-07-30) diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- 
a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index e5d04d99d2eb..20d4fb52446a 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -8388,11 +8388,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. 
""" UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 558c8aab67c5..a08d6ef9b30b 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "2.51.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index 04e05220a931..f6187795c40e 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -10166,11 +10166,17 @@ class Channel(proto.Enum): Clusters subscribed to STABLE receive versions that are known to be stable and reliable in production. + EXTENDED (4): + Clusters subscribed to EXTENDED receive + extended support and availability for versions + which are known to be stable and reliable in + production. 
""" UNSPECIFIED = 0 RAPID = 1 REGULAR = 2 STABLE = 3 + EXTENDED = 4 channel: Channel = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index 2a343a6434fa..b317d472f3e2 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index afdb562b267e..f6b08f446330 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "0.1.0" + "version": "2.51.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index d912ceecb20e..00b658392846 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.2...google-cloud-dataproc-v5.11.0) (2024-09-03) + + +### Features + +* add optional parameters (tarball-access) in DiagnoseClusterRequest 
([127e5c0](https://github.com/googleapis/google-cloud-python/commit/127e5c097b08042989c124ac4cdfb5147181855d)) + ## [5.10.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.10.1...google-cloud-dataproc-v5.10.2) (2024-07-30) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index 558c8aab67c5..0f412e925d59 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index 558c8aab67c5..0f412e925d59 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "5.11.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py index ec33ae97f36c..b6e60e1765c0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -2397,6 +2397,10 @@ class DiagnoseClusterRequest(proto.Message): for the diagnostic tarball. If not specified, a task-specific directory in the cluster's staging bucket will be used. 
+ tarball_access (google.cloud.dataproc_v1.types.DiagnoseClusterRequest.TarballAccess): + Optional. (Optional) The access type to the + diagnostic tarball. If not specified, falls back + to default access of the bucket diagnosis_interval (google.type.interval_pb2.Interval): Optional. Time interval in which diagnosis should be carried out on the cluster. @@ -2410,6 +2414,25 @@ class DiagnoseClusterRequest(proto.Message): performed. """ + class TarballAccess(proto.Enum): + r"""Defines who has access to the diagnostic tarball + + Values: + TARBALL_ACCESS_UNSPECIFIED (0): + Tarball Access unspecified. Falls back to + default access of the bucket + GOOGLE_CLOUD_SUPPORT (1): + Google Cloud Support group has read access to + the diagnostic tarball + GOOGLE_DATAPROC_DIAGNOSE (2): + Google Cloud Dataproc Diagnose service + account has read access to the diagnostic + tarball + """ + TARBALL_ACCESS_UNSPECIFIED = 0 + GOOGLE_CLOUD_SUPPORT = 1 + GOOGLE_DATAPROC_DIAGNOSE = 2 + project_id: str = proto.Field( proto.STRING, number=1, @@ -2426,6 +2449,11 @@ class DiagnoseClusterRequest(proto.Message): proto.STRING, number=4, ) + tarball_access: TarballAccess = proto.Field( + proto.ENUM, + number=5, + enum=TarballAccess, + ) diagnosis_interval: interval_pb2.Interval = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index c5f4e003db04..c1b4b338fe39 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "0.1.0" + "version": "5.11.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py index 0a6aa2e35d07..565665908c4f 100644 --- a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py +++ b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py @@ -54,7 +54,7 @@ class dataprocCallTransformer(cst.CSTTransformer): 'delete_session': ('name', 'request_id', ), 'delete_session_template': ('name', ), 'delete_workflow_template': ('name', 'version', ), - 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'tarball_access', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), 'get_autoscaling_policy': ('name', ), 'get_batch': ('name', ), 'get_cluster': ('project_id', 'region', 'cluster_name', ), diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index 06c234fed935..51aeebeac072 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,38 @@ # Changelog +## [0.12.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.12.1...google-cloud-discoveryengine-v0.12.2) (2024-09-04) + + +### Features + +* **v1alpha:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** return query segment in NL query understanding ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** support batch get documents metadata by uri patterns 
([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1alpha:** support creating workspace search data stores ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** allow set relevance threshold on search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return index status in document ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support natural language understanding in search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** support one box search ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** promote user event purge to v1 ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** promote search tuning service to v1 ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return index status in document
([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return joined status in user event ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** return structured document info in answers ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** support batch documents purge with GCS input ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** support batch get documents metadata by uri patterns ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) + + +### Documentation + +* **v1alpha:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1beta:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) +* **v1:** keep the API doc up-to-date with recent changes ([51947fb](https://github.com/googleapis/google-cloud-python/commit/51947fb5913a3859ee45cd66a0dbad51a84cebe9)) + ## [0.12.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.12.0...google-cloud-discoveryengine-v0.12.1) (2024-08-08) diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst new file mode 100644 index 000000000000..02b0fe7ef583 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst @@ -0,0 +1,6 @@ +SearchTuningService +------------------------------------- + +.. 
automodule:: google.cloud.discoveryengine_v1.services.search_tuning_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst index 6e0bbcda2e72..411ae24c92cc 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst @@ -15,5 +15,6 @@ Services for Google Cloud Discoveryengine v1 API recommendation_service schema_service search_service + search_tuning_service site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index 42959becdbfc..48c89d24d6f4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -208,6 +208,8 @@ DocumentProcessingConfig, ) from google.cloud.discoveryengine_v1beta.types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -289,6 +291,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -501,6 +504,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -569,6 +574,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", 
"PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py index e06104fe1669..261c72ef4b22 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -45,6 +45,10 @@ ) from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, +) from .services.site_search_engine_service import ( SiteSearchEngineServiceAsyncClient, SiteSearchEngineServiceClient, @@ -102,6 +106,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .types.custom_tuning_model import CustomTuningModel from .types.data_store import DataStore from .types.data_store_service import ( CreateDataStoreMetadata, @@ -116,6 +121,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, 
DeleteDocumentRequest, GetDocumentRequest, @@ -173,9 +180,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .types.rank_service import RankingRecord, RankRequest, RankResponse from .types.recommendation_service import RecommendRequest, RecommendResponse @@ -192,6 +203,13 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, SearchResponse +from .types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .types.session import Query, Session from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .types.site_search_engine_service import ( @@ -248,6 +266,7 @@ "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "SearchTuningServiceAsyncClient", "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", "AlloyDbSource", @@ -257,6 +276,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -296,6 +317,7 @@ "CreateTargetSiteMetadata", "CreateTargetSiteRequest", "CustomAttribute", + "CustomTuningModel", "DataStore", "DataStoreServiceClient", "DeleteControlRequest", @@ -359,6 +381,8 @@ "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "ListDataStoresRequest", "ListDataStoresResponse", "ListDocumentsRequest", @@ -384,9 +408,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", 
"PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "Query", "RankRequest", "RankResponse", @@ -407,6 +435,7 @@ "SearchResponse", "SearchServiceClient", "SearchTier", + "SearchTuningServiceClient", "SearchUseCase", "Session", "SiteSearchEngine", @@ -417,6 +446,9 @@ "SuggestionDenyListEntry", "TargetSite", "TextInput", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "TransactionInfo", "UpdateControlRequest", "UpdateConversationRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json index 855e95c5aceb..219241f131af 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -983,6 +998,55 @@ } } }, + "SearchTuningService": { + "clients": { + "grpc": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + 
"list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchTuningServiceAsyncClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "rest": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + } + } + }, "SiteSearchEngineService": { "clients": { "grpc": { @@ -1197,6 +1261,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1217,6 +1286,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1237,6 +1311,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index 836224789b61..d47335b10033 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -490,6 +490,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -552,6 +556,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1302,6 +1310,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1424,6 +1436,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": 
"/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py index 35b700126b56..377edbf159d7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py @@ -358,8 +358,9 @@ async def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -590,8 +591,9 @@ async def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -702,8 +704,9 @@ async def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. 
""" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 2a900c13463d..19ac1f8332c8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -803,8 +803,9 @@ def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1029,8 +1030,9 @@ def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1138,8 +1140,9 @@ def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. 
""" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py index dc8bd898b465..7d8ea550e2a5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py @@ -414,9 +414,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -601,9 +602,10 @@ def __call__( Returns: ~.control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -791,9 +793,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. 
""" @@ -1059,6 +1062,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1181,6 +1188,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index 84b6bc30c1a6..f1211380fe64 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -2213,6 +2213,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2335,6 +2339,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py index 65051ae919e2..caae22741d06 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -482,6 +482,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -544,6 +548,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1266,6 +1274,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1388,6 +1400,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py index 9cf6c715c029..2a549bd41562 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -1065,7 +1065,11 @@ async def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1142,6 +1146,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 0de64afac5e6..7cd09e1391e9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -1519,7 +1519,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1594,6 +1598,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py index 4df6d3f36550..5c05090f3e92 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py index 7b5a3256af7f..688476d1b16f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py @@ -469,6 +469,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + 
[document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py index 4bd82f9fdb85..7667dceb4ddb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py @@ -484,6 +484,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + 
Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -531,6 +562,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index 1418d3d2036b..51788a4c0f13 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. 
+ """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -532,6 +565,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -594,6 +631,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -621,6 +662,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1294,6 +1434,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, @@ -1508,6 +1659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1630,6 +1785,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py index 9f8887074424..5cdbcf388f23 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1224,6 +1232,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1346,6 +1358,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py index ee8a9194089c..ef84072de1d1 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py @@ -533,6 +533,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -655,6 +659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py index b8a0f24db978..86fe2e880195 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py @@ -355,6 +355,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -417,6 +421,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -705,6 +713,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -827,6 +839,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py index 8796a94833df..c4725bfa7dfa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py @@ -524,6 +524,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -646,6 +650,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py index 8b3979d914df..dac81936a0ee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py @@ -538,6 +538,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -660,6 +664,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 214d34eaee49..56d8cdb52691 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", 
"uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1243,6 +1251,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1365,6 +1377,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index c8165a22227b..6d55c0b7011d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -536,6 +536,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -658,6 +662,10 @@ def __call__( "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py new file mode 100644 index 000000000000..71fba7ca5f3f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SearchTuningServiceAsyncClient +from .client import SearchTuningServiceClient + +__all__ = ( + "SearchTuningServiceClient", + "SearchTuningServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py new file mode 100644 index 000000000000..af413b4bedf6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + search_tuning_service, +) + +from .client import SearchTuningServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport + + +class SearchTuningServiceAsyncClient: + """Service for search tuning.""" + + _client: SearchTuningServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = SearchTuningServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SearchTuningServiceClient._DEFAULT_UNIVERSE + + custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.custom_tuning_model_path + ) + parse_custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.parse_custom_tuning_model_path + ) + data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) + parse_data_store_path = staticmethod( + SearchTuningServiceClient.parse_data_store_path + ) + common_billing_account_path = staticmethod( + SearchTuningServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchTuningServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SearchTuningServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchTuningServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchTuningServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchTuningServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchTuningServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchTuningServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchTuningServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchTuningServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_info.__func__(SearchTuningServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_file.__func__(SearchTuningServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SearchTuningServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = SearchTuningServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SearchTuningServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SearchTuningServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.train_custom_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_custom_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py similarity index 51% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py index bc179d1d0508..97d62eb19d51 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py @@ -41,23 +41,31 @@ from google.auth.transport.grpc 
import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version +from google.cloud.discoveryengine_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore -from .transports.base import DEFAULT_CLIENT_INFO, GatewayServiceTransport -from .transports.grpc import GatewayServiceGrpcTransport -from .transports.grpc_asyncio import GatewayServiceGrpcAsyncIOTransport +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + search_tuning_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc import SearchTuningServiceGrpcTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .transports.rest import SearchTuningServiceRestTransport -class GatewayServiceClientMeta(type): - """Metaclass for the GatewayService client. +class SearchTuningServiceClientMeta(type): + """Metaclass for the SearchTuningService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -66,14 +74,15 @@ class GatewayServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[GatewayServiceTransport]] - _transport_registry["grpc"] = GatewayServiceGrpcTransport - _transport_registry["grpc_asyncio"] = GatewayServiceGrpcAsyncIOTransport + ) # type: Dict[str, Type[SearchTuningServiceTransport]] + _transport_registry["grpc"] = SearchTuningServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SearchTuningServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[GatewayServiceTransport]: + ) -> Type[SearchTuningServiceTransport]: """Returns an appropriate transport class. Args: @@ -92,14 +101,8 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class GatewayServiceClient(metaclass=GatewayServiceClientMeta): - """Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. - """ +class SearchTuningServiceClient(metaclass=SearchTuningServiceClientMeta): + """Service for search tuning.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -132,12 +135,12 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = "connectgateway.googleapis.com" + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}" + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @classmethod @@ -151,7 +154,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - GatewayServiceClient: The constructed client. + SearchTuningServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -169,7 +172,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - GatewayServiceClient: The constructed client. + SearchTuningServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -178,15 +181,61 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> GatewayServiceTransport: + def transport(self) -> SearchTuningServiceTransport: """Returns the transport used by the client instance. Returns: - GatewayServiceTransport: The transport used by the client + SearchTuningServiceTransport: The transport used by the client instance. 
""" return self._transport + @staticmethod + def custom_tuning_model_path( + project: str, + location: str, + data_store: str, + custom_tuning_model: str, + ) -> str: + """Returns a fully-qualified custom_tuning_model string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + + @staticmethod + def parse_custom_tuning_model_path(path: str) -> Dict[str, str]: + """Parses a custom_tuning_model path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/customTuningModels/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -406,14 +455,14 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = GatewayServiceClient._DEFAULT_UNIVERSE + _default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." 
) - api_endpoint = GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + api_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=universe_domain ) return api_endpoint @@ -434,7 +483,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = GatewayServiceClient._DEFAULT_UNIVERSE + universe_domain = SearchTuningServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -460,7 +509,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. """ - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -484,7 +533,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or GatewayServiceClient._compare_universes( + or SearchTuningServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -513,12 +562,16 @@ def __init__( *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ - Union[str, GatewayServiceTransport, Callable[..., GatewayServiceTransport]] + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the gateway service client. + """Instantiates the search tuning service client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -526,10 +579,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,GatewayServiceTransport,Callable[..., GatewayServiceTransport]]]): + transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the GatewayServiceTransport constructor. + arguments as used in the SearchTuningServiceTransport constructor. If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -582,11 +635,11 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = GatewayServiceClient._read_environment_variables() - self._client_cert_source = GatewayServiceClient._get_client_cert_source( + ) = SearchTuningServiceClient._read_environment_variables() + self._client_cert_source = SearchTuningServiceClient._get_client_cert_source( self._client_options.client_cert_source, self._use_client_cert ) - self._universe_domain = GatewayServiceClient._get_universe_domain( + self._universe_domain = SearchTuningServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -603,9 +656,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
- transport_provided = isinstance(transport, GatewayServiceTransport) + transport_provided = isinstance(transport, SearchTuningServiceTransport) if transport_provided: - # transport is a GatewayServiceTransport instance. + # transport is a SearchTuningServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -616,12 +669,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = cast(GatewayServiceTransport, transport) + self._transport = cast(SearchTuningServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or GatewayServiceClient._get_api_endpoint( + or SearchTuningServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -640,11 +693,12 @@ def __init__( ) transport_init: Union[ - Type[GatewayServiceTransport], Callable[..., GatewayServiceTransport] + Type[SearchTuningServiceTransport], + Callable[..., SearchTuningServiceTransport], ] = ( - GatewayServiceClient.get_transport_class(transport) + SearchTuningServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., GatewayServiceTransport], transport) + else cast(Callable[..., SearchTuningServiceTransport], transport) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -659,16 +713,17 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_resource( + def train_custom_model( self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> 
httpbody_pb2.HttpBody: - r"""GetResource performs an HTTP GET request on the - Kubernetes API Server. + ) -> operation.Operation: + r"""Trains a custom model. .. code-block:: python @@ -679,74 +734,32 @@ def get_resource( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 + from google.cloud import discoveryengine_v1 - def sample_get_resource(): + def sample_train_custom_model(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = discoveryengine_v1.SearchTuningServiceClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", ) # Make the request - response = client.get_resource(request=request) + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. + request (Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -754,67 +767,32 @@ def sample_get_resource(): sent along with the request as metadata. Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: + google.api_core.operation.Operation: + An object representing a long-running operation. - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. """ # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_resource] + rpc = self._transport._wrapped_methods[self._transport.train_custom_model] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) # Validate the universe domain. self._validate_universe_domain() @@ -827,19 +805,28 @@ def sample_get_resource(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + # Done; return the response. return response - def post_resource( + def list_custom_models( self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PostResource performs an HTTP POST on the Kubernetes - API Server. + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. .. 
code-block:: python @@ -850,74 +837,28 @@ def post_resource( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 + from google.cloud import discoveryengine_v1 - def sample_post_resource(): + def sample_list_custom_models(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = discoveryengine_v1.SearchTuningServiceClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", ) # Make the request - response = client.post_resource(request=request) + response = client.list_custom_models(request=request) # Handle the response print(response) Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. + request (Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -925,67 +866,29 @@ def sample_post_resource(): sent along with the request as metadata. Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. """ # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.post_resource] + rpc = self._transport._wrapped_methods[self._transport.list_custom_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) # Validate the universe domain. 
self._validate_universe_domain() @@ -1001,162 +904,61 @@ def sample_post_resource(): # Done; return the response. return response - def delete_resource( + def __enter__(self) -> "SearchTuningServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_delete_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.delete_resource(request=request) - - # Handle the response - print(response) + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. 
Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. 
- - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. """ # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() + request = operations_pb2.ListOperationsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_resource] + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) # Validate the universe domain. self._validate_universe_domain() @@ -1172,162 +974,48 @@ def sample_delete_resource(): # Done; return the response. return response - def put_resource( + def get_operation( self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PutResource performs an HTTP PUT on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.put_resource(request=request) - - # Handle the response - print(response) + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. 
Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. 
- - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - + ~.operations_pb2.Operation: + An ``Operation`` object. """ # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() + request = operations_pb2.GetOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.put_resource] + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) # Validate the universe domain. self._validate_universe_domain() @@ -1343,194 +1031,67 @@ def sample_put_resource(): # Done; return the response. return response - def patch_resource( + def cancel_operation( self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - def sample_patch_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.patch_resource(request=request) + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. - # Handle the response - print(response) + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. Args: - request (Union[google.api.httpbody_pb2.HttpBody, dict]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
- Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - + None """ # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = httpbody_pb2.HttpBody(**request) - elif not request: - # Null request, just make one. - request = httpbody_pb2.HttpBody() + request = operations_pb2.CancelOperationRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.patch_resource] + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - response = rpc( + rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - # Done; return the response. - return response - - def __enter__(self) -> "GatewayServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -__all__ = ("GatewayServiceClient",) +__all__ = ("SearchTuningServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py new file mode 100644 index 000000000000..8c56b6549cef --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport +from .grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .rest import SearchTuningServiceRestInterceptor, SearchTuningServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SearchTuningServiceTransport]] +_transport_registry["grpc"] = SearchTuningServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchTuningServiceRestTransport + +__all__ = ( + "SearchTuningServiceTransport", + "SearchTuningServiceGrpcTransport", + "SearchTuningServiceGrpcAsyncIOTransport", + "SearchTuningServiceRestTransport", + "SearchTuningServiceRestInterceptor", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py similarity index 76% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py index 232b9f043b8c..cd575fdaf62d 100644 --- 
a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py @@ -16,28 +16,30 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.api import httpbody_pb2 # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version +from google.cloud.discoveryengine_v1 import gapic_version as package_version +from google.cloud.discoveryengine_v1.types import search_tuning_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class GatewayServiceTransport(abc.ABC): - """Abstract transport class for GatewayService.""" +class SearchTuningServiceTransport(abc.ABC): + """Abstract transport class for SearchTuningService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = "connectgateway.googleapis.com" + DEFAULT_HOST: str = "discoveryengine.googleapis.com" def __init__( self, @@ -56,7 +58,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'connectgateway.googleapis.com'). + The hostname to connect to (default: 'discoveryengine.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -128,28 +130,13 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self.get_resource: gapic_v1.method.wrap_method( - self.get_resource, + self.train_custom_model: gapic_v1.method.wrap_method( + self.train_custom_model, default_timeout=None, client_info=client_info, ), - self.post_resource: gapic_v1.method.wrap_method( - self.post_resource, - default_timeout=None, - client_info=client_info, - ), - self.delete_resource: gapic_v1.method.wrap_method( - self.delete_resource, - default_timeout=None, - client_info=client_info, - ), - self.put_resource: gapic_v1.method.wrap_method( - self.put_resource, - default_timeout=None, - client_info=client_info, - ), - self.patch_resource: gapic_v1.method.wrap_method( - self.patch_resource, + self.list_custom_models: gapic_v1.method.wrap_method( + self.list_custom_models, default_timeout=None, client_info=client_info, ), @@ -165,48 +152,56 @@ def close(self): raise NotImplementedError() @property - def get_resource( + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def train_custom_model( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [search_tuning_service.TrainCustomModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property - def post_resource( + def list_custom_models( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [search_tuning_service.ListCustomModelsRequest], + Union[ + search_tuning_service.ListCustomModelsResponse, + Awaitable[search_tuning_service.ListCustomModelsResponse], + ], ]: raise NotImplementedError() @property - def delete_resource( + def list_operations( self, ) -> 
Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @property - def put_resource( + def get_operation( self, ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property - def patch_resource( + def cancel_operation( self, - ) -> Callable[ - [httpbody_pb2.HttpBody], - Union[httpbody_pb2.HttpBody, Awaitable[httpbody_pb2.HttpBody]], - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property @@ -214,4 +209,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("GatewayServiceTransport",) +__all__ = ("SearchTuningServiceTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py similarity index 71% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py index 69b428b95167..614fb0942cbe 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py @@ -16,25 +16,23 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api 
import httpbody_pb2 # type: ignore -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from .base import DEFAULT_CLIENT_INFO, GatewayServiceTransport +from google.cloud.discoveryengine_v1.types import search_tuning_service +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport -class GatewayServiceGrpcTransport(GatewayServiceTransport): - """gRPC backend transport for GatewayService. - Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. +class SearchTuningServiceGrpcTransport(SearchTuningServiceTransport): + """gRPC backend transport for SearchTuningService. + + Service for search tuning. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -49,7 +47,7 @@ class GatewayServiceGrpcTransport(GatewayServiceTransport): def __init__( self, *, - host: str = "connectgateway.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -67,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'connectgateway.googleapis.com'). + The hostname to connect to (default: 'discoveryengine.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -117,6 +115,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -189,7 +188,7 @@ def __init__( @classmethod def create_channel( cls, - host: str = "connectgateway.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -239,15 +238,32 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def get_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the get resource method over gRPC. + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) - GetResource performs an HTTP GET request on the - Kubernetes API Server. + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + r"""Return a callable for the train custom model method over gRPC. + + Trains a custom model. 
Returns: - Callable[[~.HttpBody], - ~.HttpBody]: + Callable[[~.TrainCustomModelRequest], + ~.Operation]: A function that, when called, will call the underlying RPC on the server. """ @@ -255,24 +271,28 @@ def get_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBod # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_resource" not in self._stubs: - self._stubs["get_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/GetResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["get_resource"] + return self._stubs["train_custom_model"] @property - def post_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the post resource method over gRPC. + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + r"""Return a callable for the list custom models method over gRPC. - PostResource performs an HTTP POST on the Kubernetes - API Server. + Gets a list of all the custom models. Returns: - Callable[[~.HttpBody], - ~.HttpBody]: + Callable[[~.ListCustomModelsRequest], + ~.ListCustomModelsResponse]: A function that, when called, will call the underlying RPC on the server. """ @@ -280,99 +300,73 @@ def post_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBo # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "post_resource" not in self._stubs: - self._stubs["post_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PostResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, ) - return self._stubs["post_resource"] + return self._stubs["list_custom_models"] + + def close(self): + self.grpc_channel.close() @property - def delete_resource( + def cancel_operation( self, - ) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the delete resource method over gRPC. - - DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_resource" not in self._stubs: - self._stubs["delete_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/DeleteResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, ) - return self._stubs["delete_resource"] + return self._stubs["cancel_operation"] @property - def put_resource(self) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the put resource method over gRPC. - - PutResource performs an HTTP PUT on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "put_resource" not in self._stubs: - self._stubs["put_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PutResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["put_resource"] + return self._stubs["get_operation"] @property - def patch_resource( + def list_operations( self, - ) -> Callable[[httpbody_pb2.HttpBody], httpbody_pb2.HttpBody]: - r"""Return a callable for the patch resource method over gRPC. - - PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - ~.HttpBody]: - A function that, when called, will call the underlying RPC - on the server. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "patch_resource" not in self._stubs: - self._stubs["patch_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PatchResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, ) - return self._stubs["patch_resource"] - - def close(self): - self.grpc_channel.close() + return self._stubs["list_operations"] @property def kind(self) -> str: return "grpc" -__all__ = ("GatewayServiceGrpcTransport",) +__all__ = ("SearchTuningServiceGrpcTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py similarity index 68% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py rename to packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py index b4f9b940f08d..3b4639ec7de9 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py @@ -16,28 +16,26 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api import httpbody_pb2 # type: ignore from google.api_core import exceptions as 
core_exceptions -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from .base import DEFAULT_CLIENT_INFO, GatewayServiceTransport -from .grpc import GatewayServiceGrpcTransport +from google.cloud.discoveryengine_v1.types import search_tuning_service +from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport -class GatewayServiceGrpcAsyncIOTransport(GatewayServiceTransport): - """gRPC AsyncIO backend transport for GatewayService. - Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. +class SearchTuningServiceGrpcAsyncIOTransport(SearchTuningServiceTransport): + """gRPC AsyncIO backend transport for SearchTuningService. + + Service for search tuning. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -53,7 +51,7 @@ class GatewayServiceGrpcAsyncIOTransport(GatewayServiceTransport): @classmethod def create_channel( cls, - host: str = "connectgateway.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -95,7 +93,7 @@ def create_channel( def __init__( self, *, - host: str = "connectgateway.googleapis.com", + host: str = "discoveryengine.googleapis.com", credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, @@ -113,7 +111,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'connectgateway.googleapis.com'). + The hostname to connect to (default: 'discoveryengine.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -164,6 +162,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -243,17 +242,35 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def get_resource( + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def train_custom_model( self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the get resource method over gRPC. + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the train custom model method over gRPC. - GetResource performs an HTTP GET request on the - Kubernetes API Server. + Trains a custom model. Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: + Callable[[~.TrainCustomModelRequest], + Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server. """ @@ -261,26 +278,28 @@ def get_resource( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_resource" not in self._stubs: - self._stubs["get_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/GetResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "train_custom_model" not in self._stubs: + self._stubs["train_custom_model"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel", + request_serializer=search_tuning_service.TrainCustomModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["get_resource"] + return self._stubs["train_custom_model"] @property - def post_resource( + def list_custom_models( self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the post resource method over gRPC. 
+ ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + Awaitable[search_tuning_service.ListCustomModelsResponse], + ]: + r"""Return a callable for the list custom models method over gRPC. - PostResource performs an HTTP POST on the Kubernetes - API Server. + Gets a list of all the custom models. Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: + Callable[[~.ListCustomModelsRequest], + Awaitable[~.ListCustomModelsResponse]]: A function that, when called, will call the underlying RPC on the server. """ @@ -288,127 +307,84 @@ def post_resource( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "post_resource" not in self._stubs: - self._stubs["post_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PostResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "list_custom_models" not in self._stubs: + self._stubs["list_custom_models"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels", + request_serializer=search_tuning_service.ListCustomModelsRequest.serialize, + response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize, ) - return self._stubs["post_resource"] + return self._stubs["list_custom_models"] - @property - def delete_resource( - self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the delete resource method over gRPC. 
+ def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.train_custom_model: gapic_v1.method_async.wrap_method( + self.train_custom_model, + default_timeout=None, + client_info=client_info, + ), + self.list_custom_models: gapic_v1.method_async.wrap_method( + self.list_custom_models, + default_timeout=None, + client_info=client_info, + ), + } - DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. + def close(self): + return self.grpc_channel.close() - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_resource" not in self._stubs: - self._stubs["delete_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/DeleteResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, ) - return self._stubs["delete_resource"] + return self._stubs["cancel_operation"] @property - def put_resource( + def get_operation( self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the put resource method over gRPC. 
- - PutResource performs an HTTP PUT on the Kubernetes - API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "put_resource" not in self._stubs: - self._stubs["put_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PutResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs["put_resource"] + return self._stubs["get_operation"] @property - def patch_resource( + def list_operations( self, - ) -> Callable[[httpbody_pb2.HttpBody], Awaitable[httpbody_pb2.HttpBody]]: - r"""Return a callable for the patch resource method over gRPC. - - PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - Returns: - Callable[[~.HttpBody], - Awaitable[~.HttpBody]]: - A function that, when called, will call the underlying RPC - on the server. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "patch_resource" not in self._stubs: - self._stubs["patch_resource"] = self.grpc_channel.unary_unary( - "/google.cloud.gkeconnect.gateway.v1beta1.GatewayService/PatchResource", - request_serializer=httpbody_pb2.HttpBody.SerializeToString, - response_deserializer=httpbody_pb2.HttpBody.FromString, + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, ) - return self._stubs["patch_resource"] - - def _prep_wrapped_messages(self, client_info): - """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.get_resource: gapic_v1.method_async.wrap_method( - self.get_resource, - default_timeout=None, - client_info=client_info, - ), - self.post_resource: gapic_v1.method_async.wrap_method( - self.post_resource, - default_timeout=None, - client_info=client_info, - ), - self.delete_resource: gapic_v1.method_async.wrap_method( - self.delete_resource, - default_timeout=None, - client_info=client_info, - ), - self.put_resource: gapic_v1.method_async.wrap_method( - self.put_resource, - default_timeout=None, - client_info=client_info, - ), - self.patch_resource: gapic_v1.method_async.wrap_method( - self.patch_resource, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - return self.grpc_channel.close() + return self._stubs["list_operations"] -__all__ = ("GatewayServiceGrpcAsyncIOTransport",) +__all__ = ("SearchTuningServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py new file mode 100644 index 
000000000000..7418b189ce0d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py @@ -0,0 +1,1034 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base 
import SearchTuningServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchTuningServiceRestInterceptor: + """Interceptor for SearchTuningService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchTuningServiceRestTransport. + + .. code-block:: python + class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_list_custom_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_custom_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_train_custom_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_custom_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchTuningServiceRestTransport(interceptor=MyCustomSearchTuningServiceInterceptor()) + client = SearchTuningServiceClient(transport=transport) + + + """ + + def pre_list_custom_models( + self, + request: search_tuning_service.ListCustomModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. 
+ """ + return request, metadata + + def post_list_custom_models( + self, response: search_tuning_service.ListCustomModelsResponse + ) -> search_tuning_service.ListCustomModelsResponse: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_train_custom_model( + self, + request: search_tuning_service.TrainCustomModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.TrainCustomModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_train_custom_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SearchTuningServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SearchTuningServiceRestInterceptor + + +class SearchTuningServiceRestTransport(SearchTuningServiceTransport): + """REST backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SearchTuningServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SearchTuningServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ListCustomModels(SearchTuningServiceRestStub): + def __hash__(self): + return hash("ListCustomModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.ListCustomModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Call the list custom models method over HTTP. + + Args: + request (~.search_tuning_service.ListCustomModelsRequest): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_tuning_service.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}/customModels", + }, + ] + request, metadata = self._interceptor.pre_list_custom_models( + request, metadata + ) + pb_request = search_tuning_service.ListCustomModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_tuning_service.ListCustomModelsResponse() + pb_resp = search_tuning_service.ListCustomModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_custom_models(resp) + return resp + + class _TrainCustomModel(SearchTuningServiceRestStub): + def __hash__(self): + return hash("TrainCustomModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.TrainCustomModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train custom model method over HTTP. + + Args: + request (~.search_tuning_service.TrainCustomModelRequest): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}:trainCustomModel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_custom_model( + request, metadata + ) + pb_request = search_tuning_service.TrainCustomModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_custom_model(resp) + return resp + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListCustomModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchTuningServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py index fc45da7663ef..319dd0689231 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -727,6 +727,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -789,6 +793,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -2299,6 +2307,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2421,6 +2433,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py index 20dbd3c1b872..320432527156 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -53,6 +53,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -497,6 +498,110 @@ async def sample_collect_user_event(): # Done; return the 
response. return response + async def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.purge_user_events + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. 
+ return response + async def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index e2936a08eb22..c881e7747a63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -59,6 +59,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -963,6 +964,108 @@ def sample_collect_user_event(): # Done; return the response. return response + def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.purge_user_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. + return response + def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py index db533bd228dd..bb46c14805c3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py @@ -30,6 +30,7 @@ from google.cloud.discoveryengine_v1 import gapic_version as package_version from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -145,6 +146,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method.wrap_method( self.import_user_events, default_retry=retries.Retry( @@ -193,6 +199,15 @@ def 
collect_user_event( ]: raise NotImplementedError() + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py index 29ea6149acb1..21e5a071cbb7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -315,6 +316,36 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py index 030bc370d024..06c828c190ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py @@ -29,6 +29,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -325,6 +326,38 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, @@ -373,6 +406,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method_async.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method_async.wrap_method( self.import_user_events, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index 2f87a0777f58..95798800bf12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -48,6 +48,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -93,6 +94,14 @@ def post_import_user_events(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_user_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_user_events(self, response): + logging.log(f"Received response: {response}") + return response + def pre_write_user_event(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -153,6 +162,29 @@ def post_import_user_events( """ 
return response + def pre_purge_user_events( + self, + request: purge_config.PurgeUserEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeUserEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_purge_user_events( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -422,6 +454,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -484,6 +520,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -759,6 +799,107 @@ def __call__( resp = self._interceptor.post_import_user_events(resp) return resp + class _PurgeUserEvents(UserEventServiceRestStub): + def __hash__(self): + return hash("PurgeUserEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in 
message_dict + } + + def __call__( + self, + request: purge_config.PurgeUserEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge user events method over HTTP. + + Args: + request (~.purge_config.PurgeUserEventsRequest): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_user_events( + request, metadata + ) + pb_request = purge_config.PurgeUserEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, 
method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_user_events(resp) + return resp + class _WriteUserEvent(UserEventServiceRestStub): def __hash__(self): return hash("WriteUserEvent") @@ -884,6 +1025,14 @@ def import_user_events( # In C++ this would require a dynamic_cast return self._ImportUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property def write_user_event( self, @@ -1048,6 +1197,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1170,6 +1323,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py index 79d6e4f7f3e3..8804192f3d63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -62,6 +62,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .custom_tuning_model import CustomTuningModel from .data_store import DataStore from .data_store_service import ( CreateDataStoreMetadata, @@ -76,6 +77,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -133,9 +136,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + 
PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .rank_service import RankingRecord, RankRequest, RankResponse from .recommendation_service import RecommendRequest, RecommendResponse @@ -152,6 +159,13 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .session import Query, Session from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .site_search_engine_service import ( @@ -240,6 +254,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -251,6 +266,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -302,9 +319,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "RankingRecord", "RankRequest", "RankResponse", @@ -322,6 +343,11 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "Query", "Session", "SiteSearchEngine", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py index 8615cafa87d4..290bf922e3c3 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. @@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). 
This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. document_metadata (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. + + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. 
@@ -417,11 +484,10 @@ class SearchResult(proto.Message): If citation_type is CHUNK_LEVEL_CITATION and chunk mode is on, populate chunk info. struct_data (google.protobuf.struct_pb2.Struct): - Data representation. The structured JSON data for the - document. It's populated from the struct data from the - Document (code pointer: http://shortn/_objzAfIiHq), or the - Chunk in search result (code pointer: - http://shortn/_Ipo6KFFGBL). + Data representation. + The structured JSON data for the document. + It's populated from the struct data from the + Document, or the Chunk in search result. """ class SnippetInfo(proto.Message): @@ -455,7 +521,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -573,10 +644,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py index db871cdc66b1..2e7748c91522 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py @@ -111,8 +111,10 @@ class TimeRange(proto.Message): class Control(proto.Message): r"""Defines a conditioned behavior to employ during serving. Must be - attached to a [ServingConfig][] to be considered at serving time. - Permitted actions dependent on ``SolutionType``. + attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to be + considered at serving time. Permitted actions dependent on + ``SolutionType``. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -150,9 +152,10 @@ class Control(proto.Message): 128 characters. Otherwise an INVALID ARGUMENT error is thrown. associated_serving_config_ids (MutableSequence[str]): - Output only. List of all [ServingConfig][] ids this control - is attached to. May take up to 10 minutes to update after - changes. + Output only. List of all + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + IDs this control is attached to. May take up to 10 minutes + to update after changes. solution_type (google.cloud.discoveryengine_v1.types.SolutionType): Required. Immutable. What solution the control belongs to. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py index 61b087c0b75e..b979c3de69bf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py @@ -654,14 +654,8 @@ class SearchParams(proto.Message): returned. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. See `parse and - chunk - documents `__ + result mode defaults to ``DOCUMENTS``. See `parse and chunk + documents `__ data_store_specs (MutableSequence[google.cloud.discoveryengine_v1.types.SearchRequest.DataStoreSpec]): Specs defining dataStores to filter on in a search call and configurations for those @@ -924,10 +918,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" @@ -1031,8 +1028,8 @@ class AnswerQueryResponse(proto.Message): Attributes: answer (google.cloud.discoveryengine_v1.types.Answer): Answer resource object. 
If - [AnswerQueryRequest.StepSpec.max_step_count][] is greater - than 1, use + [AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps][google.cloud.discoveryengine.v1.AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps] + is greater than 1, use [Answer.name][google.cloud.discoveryengine.v1.Answer.name] to fetch answer information using [ConversationalSearchService.GetAnswer][google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer] diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py new file mode 100644 index 000000000000..6b1a58131a45 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CustomTuningModel", + }, +) + + +class CustomTuningModel(proto.Message): + r"""Metadata that describes a custom tuned model. + + Attributes: + name (str): + Required. 
The fully qualified resource name of the model. + + Format: + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}`` + model must be an alpha-numerical string with limit of 40 + characters. + display_name (str): + The display name of the model. + model_version (int): + The version of the model. + model_state (google.cloud.discoveryengine_v1.types.CustomTuningModel.ModelState): + The state that the model is in (e.g.``TRAINING`` or + ``TRAINING_FAILED``). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Deprecated: timestamp the Model was created + at. + training_start_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the model training was initiated. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + """ + + class ModelState(proto.Enum): + r"""The state of the model. + + Values: + MODEL_STATE_UNSPECIFIED (0): + Default value. + TRAINING_PAUSED (1): + The model is in a paused training state. + TRAINING (2): + The model is currently training. + TRAINING_COMPLETE (3): + The model has successfully completed + training. + READY_FOR_SERVING (4): + The model is ready for serving. + TRAINING_FAILED (5): + The model training failed. + NO_IMPROVEMENT (6): + The model training finished successfully but + metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
+ """ + MODEL_STATE_UNSPECIFIED = 0 + TRAINING_PAUSED = 1 + TRAINING = 2 + TRAINING_COMPLETE = 3 + READY_FOR_SERVING = 4 + TRAINING_FAILED = 5 + NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + model_version: int = proto.Field( + proto.INT64, + number=3, + ) + model_state: ModelState = proto.Field( + proto.ENUM, + number=4, + enum=ModelState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + training_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py index 89f4d5d68ccc..65647f1a300a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py @@ -67,6 +67,17 @@ class CreateDataStoreRequest(proto.Message): create an advanced data store for site search. If the data store is not configured as site search (GENERIC vertical and PUBLIC_WEBSITE content_config), this flag will be ignored. + skip_default_schema_creation (bool): + A boolean flag indicating whether to skip the default schema + creation for the data store. Only enable this flag if you + are certain that the default schema is incompatible with + your use case. + + If set to true, you must manually create a schema for the + data store before any documents can be ingested. 
+ + This flag cannot be specified if + ``data_store.starting_schema`` is specified. """ parent: str = proto.Field( @@ -86,6 +97,10 @@ class CreateDataStoreRequest(proto.Message): proto.BOOL, number=4, ) + skip_default_schema_creation: bool = proto.Field( + proto.BOOL, + number=7, + ) class GetDataStoreRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py index 0d4d2ad7fa5b..b613e83e20a2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. 
If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py index 99d5a7597569..7f19cb717e94 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py @@ -61,6 +61,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. 
""" class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py index 7e8d2b5f8263..39388da42c72 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -268,4 +271,173 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s by exact uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. 
+ """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed. 
+ """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + state (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1.Document]. + + This field is a member of `oneof`_ ``matcher_value``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py index 36de20d74df6..009d4e1badfd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py index bade57407d0a..7c238cb1b194 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -121,10 +121,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -197,9 +197,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -420,9 +420,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. 
The instance ID of the Cloud @@ -470,6 +470,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -480,6 +485,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -487,9 +496,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -547,9 +556,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py index 6a4b091e5ba5..6633bc93f7a8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py @@ -21,9 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", manifest={ + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", + "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -37,24 +43,211 @@ ) +class PurgeUserEventsRequest(proto.Message): + r"""Request message for PurgeUserEvents method. + + Attributes: + parent (str): + Required. The resource name of the catalog under which the + events are created. The format is + ``projects/{project}/locations/global/collections/{collection}/dataStores/{dataStore}`` + filter (str): + Required. The filter string to specify the events to be + deleted with a length limit of 5,000 characters. The + eligible fields for filtering are: + + - ``eventType``: Double quoted + [UserEvent.event_type][google.cloud.discoveryengine.v1.UserEvent.event_type] + string. + - ``eventTime``: in ISO 8601 "zulu" format. + - ``userPseudoId``: Double quoted string. Specifying this + will delete all events associated with a visitor. + - ``userId``: Double quoted string. Specifying this will + delete all events associated with a user. 
+ + Examples: + + - Deleting all events in a time range: + ``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"`` + - Deleting specific eventType: ``eventType = "search"`` + - Deleting all events for a specific visitor: + ``userPseudoId = "visitor1024"`` + - Deleting all events inside a DataStore: ``*`` + + The filtering fields are assumed to have an implicit AND. + force (bool): + The ``force`` field is currently not supported. Purge user + event requests will permanently delete all purgeable events. + Once the development is complete: If ``force`` is set to + false, the method will return the expected purge count + without deleting any user events. This field will default to + false if not included in the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeUserEventsResponse(proto.Message): + r"""Response of the PurgeUserEventsRequest. If the long running + operation is successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of events purged as a result + of the operation. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class PurgeUserEventsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeUserEvents + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. 
+ + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). + error_config (google.cloud.discoveryengine_v1.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -63,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py index f225dff737b9..ae0cbbd72035 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py @@ -37,7 +37,8 @@ class RecommendRequest(proto.Message): Attributes: serving_config (str): - Required. Full resource name of a [ServingConfig][]: + Required. 
Full resource name of a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig]: ``projects/*/locations/global/collections/*/engines/*/servingConfigs/*``, or ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py index 082241aaa2dd..4c31c629c97b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1.types.UserInfo): @@ -682,12 +685,7 @@ class ContentSearchSpec(proto.Message): be no extractive answer in the search response. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + result mode defaults to ``DOCUMENTS``. chunk_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.ChunkSpec): Specifies the chunk spec to be returned from the search response. 
Only available if the @@ -698,12 +696,7 @@ class ContentSearchSpec(proto.Message): class SearchResultMode(proto.Enum): r"""Specifies the search result mode. If unspecified, the search result - mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] is - specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + mode defaults to ``DOCUMENTS``. Values: SEARCH_RESULT_MODE_UNSPECIFIED (0): @@ -815,6 +808,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. @@ -892,6 +893,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1275,7 +1280,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. 
If redirect action is triggered, no search is performed, and only @@ -1442,13 +1448,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. - Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1470,6 +1476,24 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if the LLM addon is not enabled. + NO_RELEVANT_CONTENT (6): + The no relevant content case. + + Google skips the summary if there is no relevant + content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. 
""" SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1477,6 +1501,9 @@ class SummarySkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 + NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py new file mode 100644 index 000000000000..d32623e52681 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import custom_tuning_model, import_config + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "ListCustomModelsRequest", + "ListCustomModelsResponse", + "TrainCustomModelRequest", + "TrainCustomModelResponse", + "TrainCustomModelMetadata", + }, +) + + +class ListCustomModelsRequest(proto.Message): + r"""Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + data_store (str): + Required. The resource name of the parent Data Store, such + as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to fetch + the models from. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCustomModelsResponse(proto.Message): + r"""Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + models (MutableSequence[google.cloud.discoveryengine_v1.types.CustomTuningModel]): + List of custom tuning models. + """ + + models: MutableSequence[ + custom_tuning_model.CustomTuningModel + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=custom_tuning_model.CustomTuningModel, + ) + + +class TrainCustomModelRequest(proto.Message): + r"""Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_training_input (google.cloud.discoveryengine_v1.types.TrainCustomModelRequest.GcsTrainingInput): + Cloud Storage training input. + + This field is a member of `oneof`_ ``training_input``. + data_store (str): + Required. The resource name of the Data Store, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to train + the models. + model_type (str): + Model to be trained. Supported values are: + + - **search-tuning**: Fine tuning the search system based on + data provided. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + The desired location of errors incurred + during the data ingestion and training. + model_id (str): + If not provided, a UUID will be generated. + """ + + class GcsTrainingInput(proto.Message): + r"""Cloud Storage training data input. + + Attributes: + corpus_data_path (str): + The Cloud Storage corpus data which could be associated in + train data. The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. + + For search-tuning model, each line should have the \_id, + title and text. Example: + ``{"_id": "doc1", title: "relevant doc", "text": "relevant text"}`` + query_data_path (str): + The gcs query data which could be associated in train data. + The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. + + For search-tuning model, each line should have the \_id and + text. Example: {"_id": "query1", "text": "example query"} + train_data_path (str): + Cloud Storage training data path whose format should be + ``gs:///``. The file should + be in tsv format. Each line should have the doc_id and + query_id and score (number). + + For search-tuning model, it should have the query-id + corpus-id score as tsv file header. 
The score should be a + number in ``[0, inf+)``. The larger the number is, the more + relevant the pair is. Example: + + - ``query-id\tcorpus-id\tscore`` + - ``query1\tdoc1\t1`` + test_data_path (str): + Cloud Storage test data. Same format as train_data_path. If + not provided, a random 80/20 train/test split will be + performed on train_data_path. + """ + + corpus_data_path: str = proto.Field( + proto.STRING, + number=1, + ) + query_data_path: str = proto.Field( + proto.STRING, + number=2, + ) + train_data_path: str = proto.Field( + proto.STRING, + number=3, + ) + test_data_path: str = proto.Field( + proto.STRING, + number=4, + ) + + gcs_training_input: GcsTrainingInput = proto.Field( + proto.MESSAGE, + number=2, + oneof="training_input", + message=GcsTrainingInput, + ) + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + model_type: str = proto.Field( + proto.STRING, + number=3, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=4, + message=import_config.ImportErrorConfig, + ) + model_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelResponse(proto.Message): + r"""Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the data. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + Echoes the destination for the complete + errors in the request if set. + model_status (str): + The trained model status. Possible values are: + + - **bad-data**: The training data quality is bad. + - **no-improvement**: Tuning didn't improve performance. + Won't deploy. + - **in-progress**: Model training job creation is in + progress. + - **training**: Model is actively training. 
+ - **evaluating**: The model is evaluating trained metrics. + - **indexing**: The model trained metrics are indexing. + - **ready**: The model is ready for serving. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + model_name (str): + Fully qualified name of the + CustomTuningModel. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=2, + message=import_config.ImportErrorConfig, + ) + model_status: str = proto.Field( + proto.STRING, + number=3, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=4, + ) + model_name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelMetadata(proto.Message): + r"""Metadata related to the progress of the TrainCustomModel + operation. This is returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py index 450bc325a9de..65732fabdad7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -671,6 +671,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -697,6 +700,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py index 1c4059675d8a..0c6552f8e52c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py @@ -141,7 +141,7 @@ UpdateSessionRequest, ) from .types.custom_tuning_model import CustomTuningModel -from .types.data_store import DataStore, LanguageInfo +from .types.data_store import DataStore, LanguageInfo, WorkspaceConfig from .types.data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -157,6 +157,8 @@ from .types.document import Document, ProcessedDocument 
from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -378,6 +380,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -632,5 +636,6 @@ "UserEvent", "UserEventServiceClient", "UserInfo", + "WorkspaceConfig", "WriteUserEventRequest", ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json index d1a982467642..ef4a01c7f9ab 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json @@ -634,6 +634,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -679,6 +684,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -724,6 +734,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index 
558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py index d27321c77e71..8d0e4ed5f010 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py @@ -272,7 +272,7 @@ async def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index 93ac988cd074..21cb6595333e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -693,7 +693,7 @@ def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py index 1c655ad8b840..89945a11af67 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py @@ -242,7 +242,7 @@ def update_acl_config( ) -> Callable[[acl_config_service.UpdateAclConfigRequest], acl_config.AclConfig]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py index c20e2dc0d50c..ca2d70a4a12f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py @@ -248,7 +248,7 @@ def update_acl_config( ]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py index ceaa82413af3..c5be03187f84 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py @@ -1274,6 +1274,118 @@ async def sample_get_processed_document(): # Done; return the response. 
return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index 4713addb0922..11e4241be964 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -1723,6 +1723,117 @@ def sample_get_processed_document(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py index 5acab8f79e13..6eb81a0622da 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py @@ -186,6 +186,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -277,6 +282,18 @@ def get_processed_document( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py index 462ae0dae998..5a71389bf9b1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py @@ -503,6 +503,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def 
batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py index 46ea26ca0ae2..6a794841145b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py @@ -519,6 +519,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def batch_get_documents_metadata( + self, + ) -> 
Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -571,6 +602,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index e3eca6321b0d..23d578101bc4 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -145,6 +153,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -655,6 +688,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1426,6 +1558,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py index 3f73bf8fcdbc..caa4c390030b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py @@ -72,7 +72,7 @@ UpdateSessionRequest, ) from .custom_tuning_model import CustomTuningModel -from .data_store import DataStore, LanguageInfo +from .data_store import DataStore, LanguageInfo, WorkspaceConfig from .data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -88,6 +88,8 @@ from .document import Document, ProcessedDocument from .document_processing_config import DocumentProcessingConfig from .document_service import ( + 
BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -337,6 +339,7 @@ "CustomTuningModel", "DataStore", "LanguageInfo", + "WorkspaceConfig", "CreateDataStoreMetadata", "CreateDataStoreRequest", "DeleteDataStoreMetadata", @@ -350,6 +353,8 @@ "Document", "ProcessedDocument", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py index aea3764d85fb..ef0d79a41a53 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py @@ -111,6 +111,12 @@ class AnswerSkippedReason(proto.Enum): For example, "Reply in the tone of a competing company's CEO". Google skips the answer if the query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -119,6 +125,7 @@ class AnswerSkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py index 6f9d1b7ecb2c..c0972089ab3e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. solution_type (google.cloud.discoveryengine_v1alpha.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py index ab529ab2e91f..5381115d74c7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
""" MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py index 82b01be6e0e5..d5ef66749bbe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py @@ -31,6 +31,7 @@ manifest={ "DataStore", "LanguageInfo", + "WorkspaceConfig", }, ) @@ -100,6 +101,12 @@ class DataStore(proto.Message): Currently ACL is only supported in ``GENERIC`` industry vertical with non-\ ``PUBLIC_WEBSITE`` content config. + workspace_config (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig): + Config to store data store type configuration for workspace + data. This must be set when + [DataStore.content_config][google.cloud.discoveryengine.v1alpha.DataStore.content_config] + is set as + [DataStore.ContentConfig.GOOGLE_WORKSPACE][google.cloud.discoveryengine.v1alpha.DataStore.ContentConfig.GOOGLE_WORKSPACE]. document_processing_config (google.cloud.discoveryengine_v1alpha.types.DocumentProcessingConfig): Configuration for Document understanding and enrichment. @@ -138,11 +145,16 @@ class ContentConfig(proto.Enum): PUBLIC_WEBSITE (3): The data store is used for public website search. + GOOGLE_WORKSPACE (4): + The data store is used for workspace search. Details of + workspace data store are specified in the + [WorkspaceConfig][google.cloud.discoveryengine.v1alpha.WorkspaceConfig]. 
""" CONTENT_CONFIG_UNSPECIFIED = 0 NO_CONTENT = 1 CONTENT_REQUIRED = 2 PUBLIC_WEBSITE = 3 + GOOGLE_WORKSPACE = 4 name: str = proto.Field( proto.STRING, @@ -190,6 +202,11 @@ class ContentConfig(proto.Enum): proto.BOOL, number=24, ) + workspace_config: "WorkspaceConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="WorkspaceConfig", + ) document_processing_config: gcd_document_processing_config.DocumentProcessingConfig = proto.Field( proto.MESSAGE, number=27, @@ -240,4 +257,57 @@ class LanguageInfo(proto.Message): ) +class WorkspaceConfig(proto.Message): + r"""Config to store data store type configuration for workspace + data + + Attributes: + type_ (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig.Type): + The Google Workspace data source. + dasher_customer_id (str): + Obfuscated Dasher customer ID. + """ + + class Type(proto.Enum): + r"""Specifies the type of Workspace App supported by this + DataStore + + Values: + TYPE_UNSPECIFIED (0): + Defaults to an unspecified Workspace type. 
+ GOOGLE_DRIVE (1): + Workspace Data Store contains Drive data + GOOGLE_MAIL (2): + Workspace Data Store contains Mail data + GOOGLE_SITES (3): + Workspace Data Store contains Sites data + GOOGLE_CALENDAR (4): + Workspace Data Store contains Calendar data + GOOGLE_CHAT (5): + Workspace Data Store contains Chat data + GOOGLE_GROUPS (6): + Workspace Data Store contains Groups data + GOOGLE_KEEP (7): + Workspace Data Store contains Keep data + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DRIVE = 1 + GOOGLE_MAIL = 2 + GOOGLE_SITES = 3 + GOOGLE_CALENDAR = 4 + GOOGLE_CHAT = 5 + GOOGLE_GROUPS = 6 + GOOGLE_KEEP = 7 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + dasher_customer_id: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py index 7792e6eb5a81..119f1b89d472 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import common @@ -96,6 +97,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1alpha.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. 
+ - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -211,6 +220,31 @@ class AccessRestriction(proto.Message): message="Document.AclInfo.AccessRestriction", ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -258,6 +292,11 @@ class AccessRestriction(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) class ProcessedDocument(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py index cee091f225bd..f1e063d05b9b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import document as gcd_document @@ -32,6 
+33,8 @@ "UpdateDocumentRequest", "DeleteDocumentRequest", "GetProcessedDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -349,4 +352,163 @@ class ProcessedDocumentFormat(proto.Enum): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Currently supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. 
+ """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + NOT_IN_INDEX (3): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + state (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. 
+ last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1alpha.Document] + was last indexed. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + This field is a member of `oneof`_ ``matcher_value``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py index 40fa41fd061f..03ce6ee20fd3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. 
In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py index 35e5e74f7660..7052c1e8850f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. 
If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the Cloud @@ -504,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -564,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 00e46651dd81..f1af3c79f881 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -1605,7 +1605,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1876,6 +1877,18 @@ class SummarySkippedReason(proto.Enum): CEO". Only used when [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 (9): + The non-answer seeking query ignored case. + + Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_answer_seeking_query] + is set to ``true``. 
""" SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1885,6 +1898,8 @@ class SummarySkippedReason(proto.Enum): LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 = 9 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2169,6 +2184,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2179,6 +2197,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2195,6 +2217,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2234,6 +2259,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py index 4deac76799fd..2162001f7b3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py @@ -676,6 +676,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -702,6 +705,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 3bde65ad26b0..c469dab86a26 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -139,6 +139,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -214,6 +216,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -338,6 +341,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + 
"BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -497,6 +502,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 8afe7dc88cd2..02ac1aa251d2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index 558c8aab67c5..9e3a3e937556 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # 
See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.12.2" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index 3cd99963bdee..a75f7fba7fec 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -1069,7 +1069,11 @@ async def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1146,6 +1150,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 4dbf8a833298..696861d2050d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -1523,7 +1523,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1598,6 +1602,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py index 6491783a2c52..8cb10eba46cf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py index 97a0a6923af9..5d1d8749c5d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py @@ -473,6 +473,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( 
+ self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py index d8dc444b89d1..cebef2f02dd5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py @@ -488,6 +488,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + 
[document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -535,6 +566,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index db93f79c1cf2..2014752b11ae 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -616,6 +649,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1289,6 +1421,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index 1392c0e5f3a6..cc7c07b55354 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -79,6 +79,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -154,6 +156,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, 
PurgeSuggestionDenyListEntriesResponse, @@ -311,6 +314,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -379,6 +384,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py index 57d56b1b142f..1347bf5c6e59 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. 
@@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. 
document_metadata (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. + + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. @@ -456,7 +523,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -574,10 +646,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py index 9fa4d299a928..f4a052314afd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py index 9ddc57efdf78..e09b08bc0e50 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py @@ -924,10 +924,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py index 257280080b65..d0f53427c220 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. """ MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py index 00619f5b456e..b09bd5a4a0f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. 
+ index_status (google.cloud.discoveryengine_v1beta.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py index ee06ee0d8a47..2192893da7b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py @@ 
-62,6 +62,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. """ class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py index 52f0c0f67f71..39c4a3a68d11 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -271,4 +274,174 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. 
+ """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1beta.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is indexed. 
+ NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + state (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1beta.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + This field is a member of `oneof`_ ``matcher_value``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py index 3734ca3d5c68..cbdefdb249b3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index 40e7225633b5..b4cb57eb641b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. 
The instance ID of the Cloud @@ -478,6 +478,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -488,6 +493,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -495,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -555,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py index 5a7d3c02f8af..829032161fdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py @@ -21,12 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1beta.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ "PurgeUserEventsRequest", "PurgeUserEventsResponse", "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -151,24 +154,100 @@ class PurgeUserEventsMetadata(proto.Message): ) +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1beta.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. + + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). + error_config (google.cloud.discoveryengine_v1beta.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -177,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 710ff4c38cba..8db64de1bb12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): object. Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1beta.types.UserInfo): @@ -329,8 +332,39 @@ class SearchRequest(proto.Message): Session specification. Can be used only when ``session`` is set. + relevance_threshold (google.cloud.discoveryengine_v1beta.types.SearchRequest.RelevanceThreshold): + The relevance threshold of the search + results. 
+ Default to Google defined threshold, leveraging + a balance of precision and recall to deliver + both highly accurate results and comprehensive + coverage of relevant information. """ + class RelevanceThreshold(proto.Enum): + r"""The relevance threshold of the search results. The higher + relevance threshold is, the higher relevant results are shown + and the less number of results are returned. + + Values: + RELEVANCE_THRESHOLD_UNSPECIFIED (0): + Default value. In this case, server behavior + defaults to Google defined threshold. + LOWEST (1): + Lowest relevance threshold. + LOW (2): + Low relevance threshold. + MEDIUM (3): + Medium relevance threshold. + HIGH (4): + High relevance threshold. + """ + RELEVANCE_THRESHOLD_UNSPECIFIED = 0 + LOWEST = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + class ImageQuery(proto.Message): r"""Specifies the image query input. @@ -980,6 +1014,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. 
@@ -1057,6 +1099,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1519,6 +1565,11 @@ class SessionSpec(proto.Message): number=42, message=SessionSpec, ) + relevance_threshold: RelevanceThreshold = proto.Field( + proto.ENUM, + number=44, + enum=RelevanceThreshold, + ) class SearchResponse(proto.Message): @@ -1545,7 +1596,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1585,6 +1637,9 @@ class SearchResponse(proto.Message): Only set if [SearchRequest.session][google.cloud.discoveryengine.v1beta.SearchRequest.session] is provided. See its description for more details. + one_box_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult]): + A list of One Box results. There can be + multiple One Box results of different types. """ class SearchResult(proto.Message): @@ -1773,13 +1828,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. 
- Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1806,6 +1861,19 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1814,6 +1882,8 @@ class SummarySkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2098,6 +2168,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2108,6 +2181,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2124,6 +2201,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. 
+ query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2163,6 +2243,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. @@ -2177,6 +2261,12 @@ class GeolocationConstraint(proto.Message): the input query. The proximity of the reference address to the geolocation field will be used to filter the results. + latitude (float): + The latitude of the geolocation inferred from + the input query. + longitude (float): + The longitude of the geolocation inferred + from the input query. radius_in_meters (float): The radius in meters around the address. The record is returned if the location of the @@ -2191,6 +2281,14 @@ class GeolocationConstraint(proto.Message): proto.STRING, number=2, ) + latitude: float = proto.Field( + proto.DOUBLE, + number=4, + ) + longitude: float = proto.Field( + proto.DOUBLE, + number=5, + ) radius_in_meters: float = proto.Field( proto.FLOAT, number=3, @@ -2344,6 +2442,48 @@ class SessionInfo(proto.Message): number=2, ) + class OneBoxResult(proto.Message): + r"""OneBoxResult is a holder for all results of specific type + that we want to display in UI differently. + + Attributes: + one_box_type (google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult.OneBoxType): + The type of One Box result. + search_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.SearchResult]): + The search results for this One Box. + """ + + class OneBoxType(proto.Enum): + r"""The type of One Box result. + + Values: + ONE_BOX_TYPE_UNSPECIFIED (0): + Default value. Should not be used. + PEOPLE (1): + One Box result contains people results. + ORGANIZATION (2): + One Box result contains organization results. + SLACK (3): + One Box result contains slack results. 
+ """ + ONE_BOX_TYPE_UNSPECIFIED = 0 + PEOPLE = 1 + ORGANIZATION = 2 + SLACK = 3 + + one_box_type: "SearchResponse.OneBoxResult.OneBoxType" = proto.Field( + proto.ENUM, + number=1, + enum="SearchResponse.OneBoxResult.OneBoxType", + ) + search_results: MutableSequence[ + "SearchResponse.SearchResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SearchResponse.SearchResult", + ) + @property def raw_page(self): return self @@ -2414,6 +2554,11 @@ def raw_page(self): number=19, message=SessionInfo, ) + one_box_results: MutableSequence[OneBoxResult] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=OneBoxResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py index 23500fae2a57..5c959d423932 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py @@ -673,6 +673,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. 
""" id: str = proto.Field( @@ -699,6 +702,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..ff9157e921d6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..84861f6a63ac --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py index dded80300569..7e27f4affa4f 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def 
sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py index d0d2932cf202..d051616047f7 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py similarity index 71% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py rename to packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py index b11e6fe6d708..9d7a80ef3114 100644 
--- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetResource +# Snippet for ListCustomModels # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway +# python3 -m pip install google-cloud-discoveryengine -# [START connectgateway_v1beta1_generated_GatewayService_GetResource_async] +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud import discoveryengine_v1 -async def sample_get_resource(): +async def sample_list_custom_models(): # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() + client = discoveryengine_v1.SearchTuningServiceAsyncClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", ) # Make the request - response = await client.get_resource(request=request) + response = await client.list_custom_models(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_GetResource_async] +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py similarity index 71% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py rename to packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py index 3177e20c062a..faedb982f00e 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py +++ 
b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteResource +# Snippet for ListCustomModels # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway +# python3 -m pip install google-cloud-discoveryengine -# [START connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync] +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud import discoveryengine_v1 -def sample_delete_resource(): +def sample_list_custom_models(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = discoveryengine_v1.SearchTuningServiceClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", ) # Make the request - response = client.delete_resource(request=request) + response = client.list_custom_models(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync] +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] diff --git 
a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py new file mode 100644 index 000000000000..ba97a5e74862 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py new file mode 100644 index 000000000000..422d9bcdcc6d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py new file mode 100644 index 000000000000..13cc76da400c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py new file mode 100644 index 000000000000..eeab6a2ea2b3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..8a98d99976c6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..972a126db36b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..37e8933b085d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..2164801056cc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py index 204de42ab390..d57fdf0e3be2 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def 
sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py index d4538c05c707..6819052d64c8 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 69944fc7ce8a..747564c4a6ad 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - 
"version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { @@ -4541,6 +4541,167 @@ ], "title": "discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ 
-5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -8090,22 +8251,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": 
"google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" + "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8120,22 +8281,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -8145,43 +8306,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - 
"shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" + "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8196,22 +8357,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 
51, "start": 27, "type": "SHORT" }, @@ -8221,44 +8382,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8274,13 +8435,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - 
"description": "Sample for BatchVerifyTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async", "segments": [ { "end": 55, @@ -8313,28 +8474,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - "shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": 
"google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8350,13 +8511,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - "description": "Sample for BatchVerifyTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync", "segments": [ { "end": 55, @@ -8389,7 +8550,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py" }, { "canonical": true, @@ -8399,14 +8560,320 @@ "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", "service": { "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", "shortName": "SiteSearchEngineService" }, - "shortName": "CreateTargetSite" + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": 
"discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" }, "parameters": [ { @@ -10283,6 +10750,159 @@ ], "title": "discoveryengine_v1_generated_user_event_service_import_user_events_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": "UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" 
+ }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_user_events" + }, + "description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + "shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_user_events" + }, + "description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 2884e903e0cf..c1a9f90dc413 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { @@ -5507,6 +5507,167 @@ ], "title": "discoveryengine_v1alpha_generated_data_store_service_update_document_processing_config_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient", + "shortName": 
"DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": 
"google.cloud.discoveryengine_v1alpha.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json 
b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index cff39c47dc30..6946ceb3a447 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.1.0" + "version": "0.12.2" }, "snippets": [ { @@ -4541,6 +4541,167 @@ ], "title": "discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py index c9797094604f..b89b5e57f538 100644 --- 
a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -48,7 +49,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 'boost_spec', ), 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), - 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), + 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', 'skip_default_schema_creation', ), 'create_document': ('parent', 'document', 'document_id', ), 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), @@ -81,6 +82,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_custom_models': ('data_store', ), 'list_data_stores': ('parent', 'page_size', 
'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), @@ -89,12 +91,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), + 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py index e10372b3b1fa..d82dccd4ad51 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py +++ 
b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 8a4765d046fe..f2f74d58b3d5 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -106,14 +107,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'pause_engine': ('name', ), 'provision_project': ('name', 
'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'resume_engine': ('name', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', 'relevance_threshold', ), 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'tune_engine': ('name', ), 'update_control': ('control', 'update_mask', ), diff --git 
a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index 9459a96eceda..c3bd0fd5703a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -3556,6 +3556,7 @@ def test_create_data_store_rest_required_fields( ( "create_advanced_site_search", "data_store_id", + "skip_default_schema_creation", ) ) jsonified_request.update(unset_fields) @@ -3622,6 +3623,7 @@ def test_create_data_store_rest_unset_required_fields(): ( "createAdvancedSiteSearch", "dataStoreId", + "skipDefaultSchemaCreation", ) ) & set( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index 207850afb022..2ad97529f22b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + 
"request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which 
are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + 
"code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. 
assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and 
len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. - assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate 
an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = 
purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py new file mode 100644 index 000000000000..7f47ed810349 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py @@ -0,0 +1,3772 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + import_config, + search_tuning_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchTuningServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + 
"auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SearchTuningServiceClient._get_client_cert_source(None, False) is None + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with 
mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SearchTuningServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SearchTuningServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == 
mock_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SearchTuningServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, None) + == SearchTuningServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SearchTuningServiceGrpcTransport, "grpc"), + (transports.SearchTuningServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchTuningServiceRestTransport, "rest"), + ], 
+) +def test_search_tuning_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_search_tuning_service_client_get_transport_class(): + transport = SearchTuningServiceClient.get_transport_class() + available_transports = [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceRestTransport, + ] + 
assert transport in available_transports + + transport = SearchTuningServiceClient.get_transport_class("grpc") + assert transport == transports.SearchTuningServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "true", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "false", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_search_tuning_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test_search_tuning_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + None, + ), + ], +) +def test_search_tuning_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_tuning_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchTuningServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_tuning_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is 
concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_train_custom_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +def test_train_custom_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + +def test_train_custom_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +@pytest.mark.asyncio +async def test_train_custom_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.train_custom_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.train_custom_model + ] = mock_rpc + + request = {} + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.TrainCustomModelRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_train_custom_model_async_from_dict(): + await test_train_custom_model_async(request_type=dict) + + +def test_train_custom_model_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_train_custom_model_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = search_tuning_service.ListCustomModelsResponse() + response = client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +def test_list_custom_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + +def test_list_custom_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +@pytest.mark.asyncio +async def test_list_custom_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_custom_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_custom_models + ] = mock_rpc + + request = {} + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.ListCustomModelsRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +@pytest.mark.asyncio +async def test_list_custom_models_async_from_dict(): + await test_list_custom_models_async(request_type=dict) + + +def test_list_custom_models_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = search_tuning_service.ListCustomModelsResponse() + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_custom_models_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_custom_models(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_custom_models_rest_required_fields( + request_type=search_tuning_service.ListCustomModelsRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_custom_models(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_custom_models_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_custom_models._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_custom_models_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" + 
) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.ListCustomModelsRequest.pb( + search_tuning_service.ListCustomModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + search_tuning_service.ListCustomModelsResponse.to_json( + search_tuning_service.ListCustomModelsResponse() + ) + ) + + request = search_tuning_service.ListCustomModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = search_tuning_service.ListCustomModelsResponse() + + client.list_custom_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_custom_models_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.ListCustomModelsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_custom_models(request) + + +def test_list_custom_models_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchTuningServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchTuningServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchTuningServiceGrpcTransport, + ) + + +def test_search_tuning_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_tuning_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "train_custom_model", + "list_custom_models", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_tuning_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_tuning_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport() + adc.assert_called_once() + + +def test_search_tuning_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchTuningServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_search_tuning_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchTuningServiceGrpcTransport, grpc_helpers), + (transports.SearchTuningServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_tuning_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_search_tuning_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SearchTuningServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_search_tuning_service_rest_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_no_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_with_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_search_tuning_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SearchTuningServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SearchTuningServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.train_custom_model._session + session2 = client2.transport.train_custom_model._session + assert session1 != session2 + session1 = client1.transport.list_custom_models._session + session2 = client2.transport.list_custom_models._session + assert session1 != session2 + + +def 
test_search_tuning_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_search_tuning_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_search_tuning_service_grpc_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_search_tuning_service_grpc_lro_async_client(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_custom_tuning_model_path(): + project = "squid" + location = "clam" + data_store = "whelk" + custom_tuning_model = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + actual = SearchTuningServiceClient.custom_tuning_model_path( + project, location, data_store, custom_tuning_model + ) + assert expected == actual + + +def test_parse_custom_tuning_model_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "custom_tuning_model": "mussel", + } + path = SearchTuningServiceClient.custom_tuning_model_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_custom_tuning_model_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SearchTuningServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = SearchTuningServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchTuningServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SearchTuningServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchTuningServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SearchTuningServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchTuningServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SearchTuningServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchTuningServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SearchTuningServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchTuningServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SearchTuningServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchTuningServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchTuningServiceClient, transports.SearchTuningServiceGrpcTransport), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 08b5d51e76e2..c56cf06b35ec 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -67,6 +67,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -1809,6 +1810,309 @@ async def test_collect_user_event_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + 
purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_user_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +def test_purge_user_events_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.purge_user_events(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + +def test_purge_user_events_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +@pytest.mark.asyncio +async def test_purge_user_events_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.purge_user_events + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.purge_user_events + ] = mock_rpc + + request = {} + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_user_events_async_from_dict(): + await test_purge_user_events_async(request_type=dict) + + +def test_purge_user_events_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_user_events_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2151,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { @@ -2792,6 +3097,265 @@ def test_collect_user_event_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_user_events(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_user_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_purge_user_events_rest_required_fields( + request_type=purge_config.PurgeUserEventsRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_user_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_user_events_rest_unset_required_fields(): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_user_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_user_events_rest_interceptors(null_interceptor): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserEventServiceRestInterceptor(), + ) + client = UserEventServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.UserEventServiceRestInterceptor, "post_purge_user_events" + ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, "pre_purge_user_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeUserEventsRequest.pb( + purge_config.PurgeUserEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeUserEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_user_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_user_events_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_user_events(request) + + +def test_purge_user_events_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3182,6 +3746,7 @@ def test_user_event_service_base_transport(): methods = ( "write_user_event", "collect_user_event", + "purge_user_events", "import_user_events", "get_operation", "cancel_operation", @@ -3472,6 +4037,9 @@ def test_user_event_service_client_transport_session_collision(transport_name): session1 = client1.transport.collect_user_event._session session2 = client2.transport.collect_user_event._session assert session1 != session2 + session1 = client1.transport.purge_user_events._session + session2 = client2.transport.purge_user_events._session + assert session1 != session2 session1 = client1.transport.import_user_events._session session2 = client2.transport.import_user_events._session assert session1 != session2 diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 961c537481ba..b54085a025f3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -4181,6 +4181,10 @@ def test_create_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": 
"dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { @@ -5653,6 +5657,10 @@ def test_update_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": "dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 8adf368a1a3b..b7b3e0915308 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -4302,6 +4304,387 @@ async def test_get_processed_document_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4309,7 +4692,321 @@ async def test_get_processed_document_flattened_error_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4317,41 +5014,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4365,35 +5055,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4404,21 +5094,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4427,7 +5124,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4448,30 +5145,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4484,14 +5189,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4503,17 +5208,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4525,8 +5232,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4535,7 +5242,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4548,10 +5255,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4560,16 +5267,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4577,25 +5284,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4604,26 +5311,85 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", ) -def test_get_document_rest_error(): +def test_list_documents_rest_pager(transport: str = "rest"): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - document_service.ListDocumentsRequest, + document_service.CreateDocumentRequest, dict, ], ) -def test_list_documents_rest(request_type): +def test_create_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4633,32 +5399,146 @@ def test_list_documents_rest(request_type): request_init = { "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "acl_info": { + "readers": [ + { + "principals": [ + {"user_id": "user_id_value", "group_id": "group_id_value"} + ], + "idp_wide": True, + } + ] + }, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency 
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4672,35 +5552,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4708,31 +5589,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4741,7 +5623,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4753,47 +5635,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4806,14 +5696,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4825,19 +5715,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4849,8 +5739,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4872,10 +5762,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4884,7 +5774,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4894,6 +5784,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4901,12 +5801,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4919,7 +5819,7 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): +def test_create_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4928,85 +5828,36 @@ def test_list_documents_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), + client.create_document( + document_service.CreateDocumentRequest(), parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) -def test_list_documents_rest_pager(transport: str = "rest"): +def test_create_document_rest_error(): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } - - pager = client.list_documents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5014,12 +5865,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -5040,13 +5893,28 @@ def test_create_document_rest(request_type): ] }, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -5130,7 +5998,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. assert isinstance(response, gcd_document.Document) @@ -5140,7 +6008,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5154,36 +6022,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5191,32 +6057,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5237,7 +6098,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -5253,39 +6114,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5298,14 +6152,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5321,7 +6175,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5329,7 +6183,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -5341,8 +6195,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5351,7 +6205,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -5364,10 +6220,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5380,12 +6236,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5395,7 +6252,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5408,20 +6265,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5430,9 +6287,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5442,11 +6298,11 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5455,11 +6311,11 @@ def test_create_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.UpdateDocumentRequest, + document_service.DeleteDocumentRequest, dict, ], ) -def test_update_document_rest(request_type): +def test_delete_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5467,135 +6323,29 @@ def test_update_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "acl_info": { - "readers": [ - { - "principals": [ - {"user_id": "user_id_value", "group_id": "group_id_value"} - ], - "idp_wide": True, - } - ] - }, - "index_time": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5609,34 +6359,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5647,24 +6398,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5673,7 +6421,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5685,48 +6433,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5739,14 +6475,11 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5758,19 +6491,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5779,11 +6508,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5792,9 +6520,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5807,10 +6533,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5819,53 +6545,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # get truthy value for each flattened field - mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + 
json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5874,22 +6587,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5898,11 +6602,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5910,29 +6614,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5946,35 +6650,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5985,21 +6695,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6008,7 +6718,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate 
an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6020,36 +6730,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6062,11 +6773,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6078,15 +6794,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -6095,10 +6815,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6107,7 +6828,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6120,67 +6841,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6189,11 +6853,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6217,13 +6881,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6237,19 +6901,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -6258,20 +6920,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6282,21 +6945,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6330,24 +6996,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6362,14 +7036,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6385,7 +7059,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -6393,7 +7067,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -6405,8 +7079,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6428,10 +7102,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6440,11 +7114,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.GetProcessedDocumentRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_get_processed_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6452,29 +7126,35 @@ def test_purge_documents_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument( + document="document_value", + json_data="json_data_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document.ProcessedDocument) + assert response.document == "document_value" -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6488,40 +7168,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.get_processed_document + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_processed_document + ] = mock_rpc request = {} - client.purge_documents(request) + client.get_processed_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.get_processed_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_get_processed_document_rest_required_fields( + request_type=document_service.GetProcessedDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["filter"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6532,24 +7212,28 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters 
are not mixing in. + assert not set(unset_fields) - set( + ( + "processed_document_format", + "processed_document_type", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6558,7 +7242,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6570,45 +7254,52 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def 
test_get_processed_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.get_processed_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "processedDocumentFormat", + "processedDocumentType", + ) + ) & set( ( - "parent", - "filter", + "name", + "processedDocumentType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_get_processed_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6621,16 +7312,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_get_processed_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_get_processed_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.GetProcessedDocumentRequest.pb( + document_service.GetProcessedDocumentRequest() ) transcode.return_value = { "method": "post", @@ -6642,19 +7331,19 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + 
req.return_value._content = document.ProcessedDocument.to_json( + document.ProcessedDocument() ) - request = purge_config.PurgeDocumentsRequest() + request = document_service.GetProcessedDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document.ProcessedDocument() - client.purge_documents( + client.get_processed_document( request, metadata=[ ("key", "val"), @@ -6666,8 +7355,8 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_get_processed_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6676,7 +7365,7 @@ def test_purge_documents_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -6689,10 +7378,69 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.get_processed_document(request) -def test_purge_documents_rest_error(): +def test_get_processed_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.ProcessedDocument() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processed_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + % client.transport._host, + args[1], + ) + + +def test_get_processed_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_processed_document( + document_service.GetProcessedDocumentRequest(), + name="name_value", + ) + + +def test_get_processed_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6701,11 +7449,11 @@ def test_purge_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.GetProcessedDocumentRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_get_processed_document_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6713,35 +7461,33 @@ def test_get_processed_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.ProcessedDocument( - document="document_value", - json_data="json_data_value", - ) + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.ProcessedDocument) - assert response.document == "document_value" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_get_processed_document_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6756,7 +7502,7 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_processed_document + client._transport.batch_get_documents_metadata in client._transport._wrapped_methods ) @@ -6766,29 +7512,29 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_processed_document + client._transport.batch_get_documents_metadata ] = mock_rpc request = {} - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_processed_document_rest_required_fields( - request_type=document_service.GetProcessedDocumentRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6799,28 +7545,23 @@ def test_get_processed_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "processed_document_format", - "processed_document_type", - ) - ) + assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6829,7 +7570,7 @@ def test_get_processed_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6850,43 +7591,40 @@ def test_get_processed_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_processed_document_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_processed_document._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "processedDocumentFormat", - "processedDocumentType", - ) - ) + set(("matcher",)) & set( ( - "name", - "processedDocumentType", + "parent", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_processed_document_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6899,14 +7637,14 @@ def test_get_processed_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_processed_document" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_processed_document" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetProcessedDocumentRequest.pb( - document_service.GetProcessedDocumentRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6918,19 +7656,21 @@ def test_get_processed_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.ProcessedDocument.to_json( - document.ProcessedDocument() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) 
) - request = document_service.GetProcessedDocumentRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.ProcessedDocument() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.get_processed_document( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6942,8 +7682,9 @@ def test_get_processed_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_processed_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6952,7 +7693,7 @@ def test_get_processed_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6965,10 +7706,10 @@ def test_get_processed_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_processed_document(request) + client.batch_get_documents_metadata(request) -def test_get_processed_document_rest_flattened(): +def test_batch_get_documents_metadata_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6977,16 +7718,16 @@ def test_get_processed_document_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -6994,25 +7735,27 @@ def test_get_processed_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_processed_document(**mock_args) + client.batch_get_documents_metadata(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" % client.transport._host, args[1], ) -def test_get_processed_document_rest_flattened_error(transport: str = "rest"): +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7021,13 +7764,13 @@ def test_get_processed_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_processed_document( - document_service.GetProcessedDocumentRequest(), - name="name_value", + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", ) -def test_get_processed_document_rest_error(): +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7180,6 +7923,7 @@ def test_document_service_base_transport(): "import_documents", "purge_documents", "get_processed_document", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -7487,6 +8231,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_processed_document._session session2 = client2.transport.get_processed_document._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git 
a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index b23c90ee838d..45144303f6e4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 6ffa744aae25..9345701436e5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def 
test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which 
are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + 
"code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. 
assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and 
len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. - assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert 
jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", 
request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = 
purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py index 0fd27c82c7e8..e22d9e9f080e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py @@ -3895,6 +3895,7 @@ def test_create_evaluation_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, @@ -3934,6 +3935,7 @@ def test_create_evaluation_rest(request_type): "query_id": "query_id_value", "search_result_persistence_count": 3328, }, + "relevance_threshold": 1, }, 
"query_set_spec": {"sample_query_set": "sample_query_set_value"}, }, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 33f10ee85214..1eddb9e8a3c5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -2761,6 +2761,7 @@ def test_update_serving_config_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index fe2c1010670e..b04e0007a9c2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { diff --git a/packages/google-cloud-edgenetwork/CHANGELOG.md b/packages/google-cloud-edgenetwork/CHANGELOG.md index 87be46e4a5c2..07373dfdf459 100644 --- a/packages/google-cloud-edgenetwork/CHANGELOG.md +++ b/packages/google-cloud-edgenetwork/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 
[0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.10...google-cloud-edgenetwork-v0.1.11) (2024-09-03) + + +### Documentation + +* swap comments on `BONDED` and `NON_BONDED` enums in `BondingType` ([308de6b](https://github.com/googleapis/google-cloud-python/commit/308de6b266e24a8996875736b66485d92f299401)) + ## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.9...google-cloud-edgenetwork-v0.1.10) (2024-07-30) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 558c8aab67c5..4b834789ba9e 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.11" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py index dee3539ac39e..af3183abd5d5 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/types/resources.py @@ -224,9 +224,9 @@ class BondingType(proto.Enum): have both bonded and non-bonded connectivity to machines. BONDED (1): - Single homed. - NON_BONDED (2): Multi homed. + NON_BONDED (2): + Single homed. """ BONDING_TYPE_UNSPECIFIED = 0 BONDED = 1 diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index 04d64b8b1330..4e8d32cac42d 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.0" + "version": "0.1.11" }, "snippets": [ { diff --git a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json index a621420cff60..8f664d786f4a 100644 --- a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json +++ b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json @@ -10,7 +10,7 @@ "repo": "googleapis/google-cloud-python", "distribution_name": "google-cloud-gke-connect-gateway", "api_id": "connectgateway.googleapis.com", - "default_version": "v1beta1", + "default_version": "v1", "codeowner_team": "", "api_shortname": "connectgateway", "api_description": 
"builds on the power of fleets to let Anthos users connect to and run commands against registered Anthos clusters in a simple, consistent, and secured way, whether the clusters are on Google Cloud, other public clouds, or on premises, and makes it easier to automate DevOps processes across all your clusters." diff --git a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md index 3912730be4d5..3d4aa240a4d4 100644 --- a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md +++ b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.8.11...google-cloud-gke-connect-gateway-v0.9.0) (2024-08-22) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service +* existing client libraries are being regenerated to remove unused functionality and introduce new features. 
+ +### Features + +* [google-cloud-gke-connect-gateway] removed the nonfunctional GatewayService and replaced it with the GatewayControl service ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6)) + + +### Bug Fixes + +* Set google.cloud.gkeconnect.gateway_v1 as the default import ([6639798](https://github.com/googleapis/google-cloud-python/commit/6639798f019e86e72ce6cd5a2c837320439cb2b6)) + ## [0.8.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.8.10...google-cloud-gke-connect-gateway-v0.8.11) (2024-07-30) diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst new file mode 100644 index 000000000000..5f926c0f8738 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/gateway_control.rst @@ -0,0 +1,6 @@ +GatewayControl +-------------------------------- + +.. automodule:: google.cloud.gkeconnect.gateway_v1.services.gateway_control + :members: + :inherited-members: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst new file mode 100644 index 000000000000..3a27320049ff --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Gkeconnect Gateway v1 API +=================================================== +.. 
toctree:: + :maxdepth: 2 + + gateway_control diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst new file mode 100644 index 000000000000..e043fb7f8f73 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Gkeconnect Gateway v1 API +================================================ + +.. automodule:: google.cloud.gkeconnect.gateway_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst similarity index 80% rename from packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst rename to packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst index 7dffb5a4dc24..7091b0d7e026 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_service.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/gateway_control.rst @@ -1,6 +1,6 @@ -GatewayService +GatewayControl -------------------------------- -.. automodule:: google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service +.. automodule:: google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control :members: :inherited-members: diff --git a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst index c9c94b3690bc..98a39ee54b47 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/gateway_v1beta1/services_.rst @@ -3,4 +3,4 @@ Services for Google Cloud Gkeconnect Gateway v1beta1 API .. 
toctree:: :maxdepth: 2 - gateway_service + gateway_control diff --git a/packages/google-cloud-gke-connect-gateway/docs/index.rst b/packages/google-cloud-gke-connect-gateway/docs/index.rst index 16bff39fc480..901b79a633a2 100644 --- a/packages/google-cloud-gke-connect-gateway/docs/index.rst +++ b/packages/google-cloud-gke-connect-gateway/docs/index.rst @@ -2,6 +2,17 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of GKE Connect Gateway. +By default, you will get version ``gateway_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + gateway_v1/services_ + gateway_v1/types_ API Reference ------------- diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py index 4ec6e91d9d73..f15cecdbcbb1 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/__init__.py @@ -18,14 +18,20 @@ __version__ = package_version.__version__ -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.async_client import ( - GatewayServiceAsyncClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control.async_client import ( + GatewayControlAsyncClient, ) -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.client import ( - GatewayServiceClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control.client import ( + GatewayControlClient, +) +from google.cloud.gkeconnect.gateway_v1.types.control import ( + GenerateCredentialsRequest, + GenerateCredentialsResponse, ) __all__ = ( - "GatewayServiceClient", - "GatewayServiceAsyncClient", + "GatewayControlClient", + "GatewayControlAsyncClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) diff --git 
a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py index 558c8aab67c5..a4e5ba3ce496 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py new file mode 100644 index 000000000000..31f3cf855bef --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.gateway_control import GatewayControlAsyncClient, GatewayControlClient +from .types.control import GenerateCredentialsRequest, GenerateCredentialsResponse + +__all__ = ( + "GatewayControlAsyncClient", + "GatewayControlClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json new file mode 100644 index 000000000000..5d08e8325089 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.gkeconnect.gateway_v1", + "protoPackage": "google.cloud.gkeconnect.gateway.v1", + "schema": "1.0", + "services": { + "GatewayControl": { + "clients": { + "grpc": { + "libraryClient": "GatewayControlClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GatewayControlAsyncClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + }, + "rest": { + "libraryClient": "GatewayControlClient", + "rpcs": { + "GenerateCredentials": { + "methods": [ + "generate_credentials" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py 
@@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed new file mode 100644 index 000000000000..fc91be3f2256 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gke-connect-gateway package uses inline types. diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py similarity index 79% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py index 1e7c2740f4c2..6d98a5d3aec9 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import GatewayServiceAsyncClient -from .client import GatewayServiceClient +from .async_client import GatewayControlAsyncClient +from .client import GatewayControlClient __all__ = ( - "GatewayServiceClient", - "GatewayServiceAsyncClient", + "GatewayControlClient", + "GatewayControlAsyncClient", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py new file mode 100644 index 000000000000..1f6ebd6ef20d --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/async_client.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .client import GatewayControlClient +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlAsyncClient: + """GatewayControl is the control plane API for Connect Gateway.""" + + _client: GatewayControlClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = GatewayControlClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GatewayControlClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod( + GatewayControlClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GatewayControlClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GatewayControlClient.common_folder_path) + parse_common_folder_path = staticmethod( + GatewayControlClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GatewayControlClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GatewayControlClient.parse_common_organization_path + ) + common_project_path = staticmethod(GatewayControlClient.common_project_path) + parse_common_project_path = staticmethod( + GatewayControlClient.parse_common_project_path + ) + common_location_path = staticmethod(GatewayControlClient.common_location_path) + parse_common_location_path = staticmethod( + GatewayControlClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_info.__func__(GatewayControlAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_file.__func__(GatewayControlAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return GatewayControlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = GatewayControlClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = GatewayControlClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1 + + async def sample_generate_credentials(): + # Create a client + client = gateway_v1.GatewayControlAsyncClient() + + # Initialize request argument(s) + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest, dict]]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_credentials + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "GatewayControlAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py new file mode 100644 index 000000000000..0f9ff2470144 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py @@ -0,0 +1,760 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc import GatewayControlGrpcTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport +from .transports.rest import GatewayControlRestTransport + + +class GatewayControlClientMeta(type): + """Metaclass for the GatewayControl client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[GatewayControlTransport]]
+    _transport_registry["grpc"] = GatewayControlGrpcTransport
+    _transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport
+    _transport_registry["rest"] = GatewayControlRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[GatewayControlTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class GatewayControlClient(metaclass=GatewayControlClientMeta):
+    """GatewayControl is the control plane API for Connect Gateway."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "connectgateway.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+
        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GatewayControlClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = GatewayControlClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GatewayControlClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GatewayControlClient._read_environment_variables() + self._client_cert_source = GatewayControlClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GatewayControlClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GatewayControlTransport) + if transport_provided: + # transport is a GatewayControlTransport instance. 
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(GatewayControlTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GatewayControlClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GatewayControlTransport], Callable[..., GatewayControlTransport] + ] = ( + GatewayControlClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GatewayControlTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides 
connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1 + + def sample_generate_credentials(): + # Create a client + client = gateway_v1.GatewayControlClient() + + # Initialize request argument(s) + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest, dict]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.generate_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GatewayControlClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py new file mode 100644 index 000000000000..bc45ac2893ec --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GatewayControlTransport +from .grpc import GatewayControlGrpcTransport +from .grpc_asyncio import GatewayControlGrpcAsyncIOTransport +from .rest import GatewayControlRestInterceptor, GatewayControlRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayControlTransport]] +_transport_registry["grpc"] = GatewayControlGrpcTransport +_transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport +_transport_registry["rest"] = GatewayControlRestTransport + +__all__ = ( + "GatewayControlTransport", + "GatewayControlGrpcTransport", + "GatewayControlGrpcAsyncIOTransport", + "GatewayControlRestTransport", + "GatewayControlRestInterceptor", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py new file mode 100644 index 000000000000..0ad0eea11657 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1 import gapic_version as package_version +from google.cloud.gkeconnect.gateway_v1.types import control + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GatewayControlTransport(abc.ABC): + """Abstract transport class for GatewayControl.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "connectgateway.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method.wrap_method( + self.generate_credentials, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Union[ + control.GenerateCredentialsResponse, + Awaitable[control.GenerateCredentialsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GatewayControlTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py new file mode 100644 index 000000000000..893617331d3d --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport + + +class GatewayControlGrpcTransport(GatewayControlTransport): + """gRPC backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + ~.GenerateCredentialsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GatewayControlGrpcTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py new file mode 100644 index 000000000000..8f4182cc9522 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/grpc_asyncio.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .grpc import GatewayControlGrpcTransport + + +class GatewayControlGrpcAsyncIOTransport(GatewayControlTransport): + """gRPC AsyncIO backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Awaitable[control.GenerateCredentialsResponse], + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + Awaitable[~.GenerateCredentialsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method_async.wrap_method( + self.generate_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("GatewayControlGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py 
b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py new file mode 100644 index 000000000000..de0570c0ecdb --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.gkeconnect.gateway_v1.types import control + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import 
GatewayControlTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GatewayControlRestInterceptor: + """Interceptor for GatewayControl. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GatewayControlRestTransport. + + .. code-block:: python + class MyCustomGatewayControlInterceptor(GatewayControlRestInterceptor): + def pre_generate_credentials(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_credentials(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GatewayControlRestTransport(interceptor=MyCustomGatewayControlInterceptor()) + client = GatewayControlClient(transport=transport) + + + """ + + def pre_generate_credentials( + self, + request: control.GenerateCredentialsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[control.GenerateCredentialsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_credentials + + Override in a subclass to manipulate the request or metadata + before they are sent to the GatewayControl server. + """ + return request, metadata + + def post_generate_credentials( + self, response: control.GenerateCredentialsResponse + ) -> control.GenerateCredentialsResponse: + """Post-rpc interceptor for generate_credentials + + Override in a subclass to manipulate the response + after it is returned by the GatewayControl server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class GatewayControlRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GatewayControlRestInterceptor + + +class GatewayControlRestTransport(GatewayControlTransport): + """REST backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GatewayControlRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GatewayControlRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GenerateCredentials(GatewayControlRestStub): + def __hash__(self): + return hash("GenerateCredentials") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: control.GenerateCredentialsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""Call the generate credentials method over HTTP. + + Args: + request (~.control.GenerateCredentialsRequest): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.control.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/memberships/*}:generateCredentials", + }, + ] + request, metadata = self._interceptor.pre_generate_credentials( + request, metadata + ) + pb_request = control.GenerateCredentialsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = control.GenerateCredentialsResponse() + pb_resp = control.GenerateCredentialsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_credentials(resp) + return resp + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateCredentials(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GatewayControlRestTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py similarity index 77% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py index ca8527fb89ec..2bfb31823e04 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/gateway.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/__init__.py @@ -13,12 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import proto # type: ignore +from .control import GenerateCredentialsRequest, GenerateCredentialsResponse -__protobuf__ = proto.module( - package="google.cloud.gkeconnect.gateway.v1beta1", - manifest={}, +__all__ = ( + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py new file mode 100644 index 000000000000..71e358d796f7 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/types/control.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gkeconnect.gateway.v1", + manifest={ + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", + }, +) + + +class GenerateCredentialsRequest(proto.Message): + r"""A request for connection information for a particular + membership. + + Attributes: + name (str): + Required. The Fleet membership resource. + force_use_agent (bool): + Optional. Whether to force the use of Connect + Agent-based transport. 
+ This will return a configuration that uses + Connect Agent as the underlying transport + mechanism for cluster types that would otherwise + have used a different transport. Requires that + Connect Agent be installed on the cluster. + Setting this field to false is equivalent to not + setting it. + version (str): + Optional. The Connect Gateway version to be + used in the resulting configuration. + + Leave this field blank to let the server choose + the version (recommended). + kubernetes_namespace (str): + Optional. The namespace to use in the kubeconfig context. + + If this field is specified, the server will set the + ``namespace`` field in kubeconfig context. If not specified, + the ``namespace`` field is omitted. + operating_system (google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest.OperatingSystem): + Optional. The operating system where the + kubeconfig will be used. + """ + + class OperatingSystem(proto.Enum): + r"""Operating systems requiring specialized kubeconfigs. + + Values: + OPERATING_SYSTEM_UNSPECIFIED (0): + Generates a kubeconfig that works for all + operating systems not defined below. + OPERATING_SYSTEM_WINDOWS (1): + Generates a kubeconfig that is specifically + designed to work with Windows. + """ + OPERATING_SYSTEM_UNSPECIFIED = 0 + OPERATING_SYSTEM_WINDOWS = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force_use_agent: bool = proto.Field( + proto.BOOL, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + kubernetes_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + operating_system: OperatingSystem = proto.Field( + proto.ENUM, + number=5, + enum=OperatingSystem, + ) + + +class GenerateCredentialsResponse(proto.Message): + r"""Connection information for a particular membership. + + Attributes: + kubeconfig (bytes): + A full YAML kubeconfig in serialized format. + endpoint (str): + The generated URI of the cluster as accessed + through the Connect Gateway API. 
+ """ + + kubeconfig: bytes = proto.Field( + proto.BYTES, + number=1, + ) + endpoint: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py index d16e8a461ad2..f58bb78cb5e7 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/__init__.py @@ -18,9 +18,12 @@ __version__ = package_version.__version__ -from .services.gateway_service import GatewayServiceAsyncClient, GatewayServiceClient +from .services.gateway_control import GatewayControlAsyncClient, GatewayControlClient +from .types.control import GenerateCredentialsRequest, GenerateCredentialsResponse __all__ = ( - "GatewayServiceAsyncClient", - "GatewayServiceClient", + "GatewayControlAsyncClient", + "GatewayControlClient", + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json index 38a616fb276e..33fee488e86c 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_metadata.json @@ -5,64 +5,24 @@ "protoPackage": "google.cloud.gkeconnect.gateway.v1beta1", "schema": "1.0", "services": { - "GatewayService": { + "GatewayControl": { "clients": { "grpc": { - "libraryClient": "GatewayServiceClient", + "libraryClient": "GatewayControlClient", "rpcs": { - "DeleteResource": { + "GenerateCredentials": { "methods": [ - "delete_resource" - ] - }, - "GetResource": { 
- "methods": [ - "get_resource" - ] - }, - "PatchResource": { - "methods": [ - "patch_resource" - ] - }, - "PostResource": { - "methods": [ - "post_resource" - ] - }, - "PutResource": { - "methods": [ - "put_resource" + "generate_credentials" ] } } }, "grpc-async": { - "libraryClient": "GatewayServiceAsyncClient", + "libraryClient": "GatewayControlAsyncClient", "rpcs": { - "DeleteResource": { - "methods": [ - "delete_resource" - ] - }, - "GetResource": { - "methods": [ - "get_resource" - ] - }, - "PatchResource": { - "methods": [ - "patch_resource" - ] - }, - "PostResource": { - "methods": [ - "post_resource" - ] - }, - "PutResource": { + "GenerateCredentials": { "methods": [ - "put_resource" + "generate_credentials" ] } } diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py index 558c8aab67c5..a4e5ba3ce496 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py new file mode 100644 index 000000000000..6d98a5d3aec9 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import GatewayControlAsyncClient +from .client import GatewayControlClient + +__all__ = ( + "GatewayControlClient", + "GatewayControlAsyncClient", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py new file mode 100644 index 000000000000..a00f6b015f62 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/async_client.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .client import GatewayControlClient +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlAsyncClient: + """GatewayControl is the control plane API for Connect Gateway.""" + + _client: GatewayControlClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = GatewayControlClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GatewayControlClient._DEFAULT_UNIVERSE + + common_billing_account_path = staticmethod( + GatewayControlClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GatewayControlClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GatewayControlClient.common_folder_path) + parse_common_folder_path = staticmethod( + GatewayControlClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GatewayControlClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GatewayControlClient.parse_common_organization_path + ) + common_project_path = staticmethod(GatewayControlClient.common_project_path) + parse_common_project_path = staticmethod( + GatewayControlClient.parse_common_project_path + ) + common_location_path = staticmethod(GatewayControlClient.common_location_path) + parse_common_location_path = staticmethod( + GatewayControlClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_info.__func__(GatewayControlAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlAsyncClient: The constructed client. + """ + return GatewayControlClient.from_service_account_file.__func__(GatewayControlAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return GatewayControlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = GatewayControlClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = GatewayControlClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1beta1 + + async def sample_generate_credentials(): + # Create a client + client = gateway_v1beta1.GatewayControlAsyncClient() + + # Initialize request argument(s) + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = await client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest, dict]]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_credentials + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "GatewayControlAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py new file mode 100644 index 000000000000..be9bc2efdcf0 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py @@ -0,0 +1,758 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .transports.base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .transports.grpc import GatewayControlGrpcTransport +from .transports.grpc_asyncio import GatewayControlGrpcAsyncIOTransport + + +class GatewayControlClientMeta(type): + """Metaclass for the GatewayControl client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GatewayControlTransport]] + _transport_registry["grpc"] = GatewayControlGrpcTransport + _transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GatewayControlTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GatewayControlClient(metaclass=GatewayControlClientMeta): + """GatewayControl is the control plane API for Connect Gateway.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "connectgateway.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "connectgateway.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GatewayControlClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GatewayControlTransport: + """Returns the transport used by the client instance. + + Returns: + GatewayControlTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GatewayControlClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GatewayControlClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = GatewayControlClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or GatewayControlClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, GatewayControlTransport, Callable[..., GatewayControlTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the gateway control client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GatewayControlTransport,Callable[..., GatewayControlTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GatewayControlTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GatewayControlClient._read_environment_variables() + self._client_cert_source = GatewayControlClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GatewayControlClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GatewayControlTransport) + if transport_provided: + # transport is a GatewayControlTransport instance. 
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(GatewayControlTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GatewayControlClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GatewayControlTransport], Callable[..., GatewayControlTransport] + ] = ( + GatewayControlClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GatewayControlTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def generate_credentials( + self, + request: Optional[Union[control.GenerateCredentialsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> control.GenerateCredentialsResponse: + r"""GenerateCredentials provides 
connection information + that allows a user to access the specified membership + using Connect Gateway. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.gkeconnect import gateway_v1beta1 + + def sample_generate_credentials(): + # Create a client + client = gateway_v1beta1.GatewayControlClient() + + # Initialize request argument(s) + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", + ) + + # Make the request + response = client.generate_credentials(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest, dict]): + The request object. A request for connection information + for a particular membership. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse: + Connection information for a + particular membership. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, control.GenerateCredentialsRequest): + request = control.GenerateCredentialsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.generate_credentials] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GatewayControlClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GatewayControlClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py similarity index 66% rename from packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py rename to packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py index d04e25a4ef88..3692124a1a40 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/transports/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/__init__.py @@ -16,17 +16,17 @@ from collections import OrderedDict from typing 
import Dict, Type -from .base import GatewayServiceTransport -from .grpc import GatewayServiceGrpcTransport -from .grpc_asyncio import GatewayServiceGrpcAsyncIOTransport +from .base import GatewayControlTransport +from .grpc import GatewayControlGrpcTransport +from .grpc_asyncio import GatewayControlGrpcAsyncIOTransport # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayServiceTransport]] -_transport_registry["grpc"] = GatewayServiceGrpcTransport -_transport_registry["grpc_asyncio"] = GatewayServiceGrpcAsyncIOTransport +_transport_registry = OrderedDict() # type: Dict[str, Type[GatewayControlTransport]] +_transport_registry["grpc"] = GatewayControlGrpcTransport +_transport_registry["grpc_asyncio"] = GatewayControlGrpcAsyncIOTransport __all__ = ( - "GatewayServiceTransport", - "GatewayServiceGrpcTransport", - "GatewayServiceGrpcAsyncIOTransport", + "GatewayControlTransport", + "GatewayControlGrpcTransport", + "GatewayControlGrpcAsyncIOTransport", ) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py new file mode 100644 index 000000000000..5640d2d30b08 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GatewayControlTransport(abc.ABC): + """Abstract transport class for GatewayControl.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "connectgateway.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method.wrap_method( + self.generate_credentials, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Union[ + control.GenerateCredentialsResponse, + Awaitable[control.GenerateCredentialsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GatewayControlTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py new file mode 100644 index 000000000000..c61f1c5d018d --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport + + +class GatewayControlGrpcTransport(GatewayControlTransport): + """gRPC backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], control.GenerateCredentialsResponse + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + ~.GenerateCredentialsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1beta1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GatewayControlGrpcTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f51400e32386 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/grpc_asyncio.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.gkeconnect.gateway_v1beta1.types import control + +from .base import DEFAULT_CLIENT_INFO, GatewayControlTransport +from .grpc import GatewayControlGrpcTransport + + +class GatewayControlGrpcAsyncIOTransport(GatewayControlTransport): + """gRPC AsyncIO backend transport for GatewayControl. + + GatewayControl is the control plane API for Connect Gateway. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "connectgateway.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'connectgateway.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def generate_credentials( + self, + ) -> Callable[ + [control.GenerateCredentialsRequest], + Awaitable[control.GenerateCredentialsResponse], + ]: + r"""Return a callable for the generate credentials method over gRPC. + + GenerateCredentials provides connection information + that allows a user to access the specified membership + using Connect Gateway. + + Returns: + Callable[[~.GenerateCredentialsRequest], + Awaitable[~.GenerateCredentialsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_credentials" not in self._stubs: + self._stubs["generate_credentials"] = self.grpc_channel.unary_unary( + "/google.cloud.gkeconnect.gateway.v1beta1.GatewayControl/GenerateCredentials", + request_serializer=control.GenerateCredentialsRequest.serialize, + response_deserializer=control.GenerateCredentialsResponse.deserialize, + ) + return self._stubs["generate_credentials"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_credentials: gapic_v1.method_async.wrap_method( + self.generate_credentials, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("GatewayControlGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py 
b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py deleted file mode 100644 index 067ac7ab5cc4..000000000000 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_service/async_client.py +++ /dev/null @@ -1,1133 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import re -from typing import ( - Callable, - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.api_core.client_options import ClientOptions -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.gkeconnect.gateway_v1beta1 import gapic_version as package_version - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api import httpbody_pb2 # type: ignore -from google.protobuf import any_pb2 # type: ignore - -from .client import GatewayServiceClient -from .transports.base import DEFAULT_CLIENT_INFO, 
GatewayServiceTransport -from .transports.grpc_asyncio import GatewayServiceGrpcAsyncIOTransport - - -class GatewayServiceAsyncClient: - """Gateway service is a public API which works as a Kubernetes - resource model proxy between end users and registered Kubernetes - clusters. Each RPC in this service matches with an HTTP verb. - End user will initiate kubectl commands against the Gateway - service, and Gateway service will forward user requests to - clusters. - """ - - _client: GatewayServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = GatewayServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = GatewayServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = GatewayServiceClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod( - GatewayServiceClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - GatewayServiceClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(GatewayServiceClient.common_folder_path) - parse_common_folder_path = staticmethod( - GatewayServiceClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - GatewayServiceClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - GatewayServiceClient.parse_common_organization_path - ) - common_project_path = staticmethod(GatewayServiceClient.common_project_path) - parse_common_project_path = staticmethod( - GatewayServiceClient.parse_common_project_path - ) - common_location_path = staticmethod(GatewayServiceClient.common_location_path) - parse_common_location_path = staticmethod( - GatewayServiceClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the 
provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GatewayServiceAsyncClient: The constructed client. - """ - return GatewayServiceClient.from_service_account_info.__func__(GatewayServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - GatewayServiceAsyncClient: The constructed client. - """ - return GatewayServiceClient.from_service_account_file.__func__(GatewayServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return GatewayServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> GatewayServiceTransport: - """Returns the transport used by the client instance. - - Returns: - GatewayServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. 
- """ - return self._client._universe_domain - - get_transport_class = GatewayServiceClient.get_transport_class - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[ - Union[str, GatewayServiceTransport, Callable[..., GatewayServiceTransport]] - ] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the gateway service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,GatewayServiceTransport,Callable[..., GatewayServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the GatewayServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = GatewayServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def get_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""GetResource performs an HTTP GET request on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_get_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.get_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.get_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def post_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PostResource performs an HTTP POST on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.post_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. 
- - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.post_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""DeleteResource performs an HTTP DELETE on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_delete_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.delete_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.delete_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def put_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PutResource performs an HTTP PUT on the Kubernetes - API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.put_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. 
- - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. 
- google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.put_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def patch_resource( - self, - request: Optional[Union[httpbody_pb2.HttpBody, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> httpbody_pb2.HttpBody: - r"""PatchResource performs an HTTP PATCH on the - Kubernetes API Server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.api import httpbody_pb2 # type: ignore - from google.cloud.gkeconnect import gateway_v1beta1 - - async def sample_patch_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.patch_resource(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.api.httpbody_pb2.HttpBody, dict]]): - The request object. Message that represents an arbitrary HTTP body. It - should only be used for payload formats that can't be - represented as JSON, such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as the - response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request fields - and also want access to the raw HTTP body. - - Example: - - :: - - message GetResourceRequest { - // A unique request id. - string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - :: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api.httpbody_pb2.HttpBody: - Message that represents an arbitrary HTTP body. It should only be used for - payload formats that can't be represented as JSON, - such as raw binary or an HTML page. - - This message can be used both in streaming and - non-streaming API methods in the request as well as - the response. - - It can be used as a top-level request field, which is - convenient if one wants to extract parameters from - either the URL or HTTP template into the request - fields and also want access to the raw HTTP body. - - Example: - - message GetResourceRequest { - // A unique request id. string request_id = 1; - - // The raw HTTP body is bound to this field. - google.api.HttpBody http_body = 2; - - } - - service ResourceService { - rpc GetResource(GetResourceRequest) - returns (google.api.HttpBody); - - rpc UpdateResource(google.api.HttpBody) - returns (google.protobuf.Empty); - - } - - Example with streaming methods: - - service CaldavService { - rpc GetCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - rpc UpdateCalendar(stream google.api.HttpBody) - returns (stream google.api.HttpBody); - - } - - Use of this type only changes how the request and - response bodies are handled, all other features will - continue to work unchanged. - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = httpbody_pb2.HttpBody(**request) - elif not request: - request = httpbody_pb2.HttpBody() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[ - self._client._transport.patch_resource - ] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "GatewayServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("GatewayServiceAsyncClient",) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py index 77c6c7a35ebe..2bfb31823e04 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/__init__.py @@ -13,5 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .control import GenerateCredentialsRequest, GenerateCredentialsResponse -__all__ = () +__all__ = ( + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", +) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py new file mode 100644 index 000000000000..4afb5fd530f1 --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/types/control.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gkeconnect.gateway.v1beta1", + manifest={ + "GenerateCredentialsRequest", + "GenerateCredentialsResponse", + }, +) + + +class GenerateCredentialsRequest(proto.Message): + r"""A request for connection information for a particular + membership. + + Attributes: + name (str): + Required. The Fleet membership resource. + force_use_agent (bool): + Optional. Whether to force the use of Connect + Agent-based transport. + This will return a configuration that uses + Connect Agent as the underlying transport + mechanism for cluster types that would otherwise + have used a different transport. 
Requires that + Connect Agent be installed on the cluster. + Setting this field to false is equivalent to not + setting it. + version (str): + Optional. The Connect Gateway version to be + used in the resulting configuration. + + Leave this field blank to let the server choose + the version (recommended). + kubernetes_namespace (str): + Optional. The namespace to use in the kubeconfig context. + + If this field is specified, the server will set the + ``namespace`` field in kubeconfig context. If not specified, + the ``namespace`` field is omitted. + operating_system (google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest.OperatingSystem): + Optional. The operating system where the + kubeconfig will be used. + """ + + class OperatingSystem(proto.Enum): + r"""Operating systems requiring specialized kubeconfigs. + + Values: + OPERATING_SYSTEM_UNSPECIFIED (0): + Generates a kubeconfig that works for all + operating systems not defined below. + OPERATING_SYSTEM_WINDOWS (1): + Generates a kubeconfig that is specifically + designed to work with Windows. + """ + OPERATING_SYSTEM_UNSPECIFIED = 0 + OPERATING_SYSTEM_WINDOWS = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force_use_agent: bool = proto.Field( + proto.BOOL, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + kubernetes_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + operating_system: OperatingSystem = proto.Field( + proto.ENUM, + number=5, + enum=OperatingSystem, + ) + + +class GenerateCredentialsResponse(proto.Message): + r"""Connection information for a particular membership. + + Attributes: + kubeconfig (bytes): + A full YAML kubeconfig in serialized format. + endpoint (str): + The generated URI of the cluster as accessed + through the Connect Gateway API. 
+ """ + + kubeconfig: bytes = proto.Field( + proto.BYTES, + number=1, + ) + endpoint: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py similarity index 75% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py index 2351568e5723..ead6fc1342dd 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_async.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for PatchResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_PatchResource_async] +# [START connectgateway_v1_generated_GatewayControl_GenerateCredentials_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud.gkeconnect import gateway_v1 -async def sample_patch_resource(): +async def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() + client = gateway_v1.GatewayControlAsyncClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = await client.patch_resource(request=request) + response = await client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_PatchResource_async] +# [END connectgateway_v1_generated_GatewayControl_GenerateCredentials_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py similarity index 75% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py index e88cf56e0b48..fcb75ad701c7 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py +++ 
b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1_generated_gateway_control_generate_credentials_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for PatchResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_PatchResource_sync] +# [START connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 +from google.cloud.gkeconnect import gateway_v1 -def sample_patch_resource(): +def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = gateway_v1.GatewayControlClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = client.patch_resource(request=request) + response = client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_PatchResource_sync] +# [END connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py 
b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py similarity index 76% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py index fb088f6698bd..61202c081a3f 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for DeleteResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_DeleteResource_async] +# [START connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore from google.cloud.gkeconnect import gateway_v1beta1 -async def sample_delete_resource(): +async def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() + client = gateway_v1beta1.GatewayControlAsyncClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = await client.delete_resource(request=request) + response = await client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_DeleteResource_async] +# [END connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py similarity index 77% rename from packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py rename to packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py index c83488e8f680..87603404a7a2 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py @@ 
-15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetResource +# Snippet for GenerateCredentials # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-gke-connect-gateway -# [START connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [START connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore from google.cloud.gkeconnect import gateway_v1beta1 -def sample_get_resource(): +def sample_generate_credentials(): # Create a client - client = gateway_v1beta1.GatewayServiceClient() + client = gateway_v1beta1.GatewayControlClient() # Initialize request argument(s) - request = httpbody_pb2.HttpBody( + request = gateway_v1beta1.GenerateCredentialsRequest( + name="name_value", ) # Make the request - response = client.get_resource(request=request) + response = client.generate_credentials(request=request) # Handle the response print(response) -# [END connectgateway_v1beta1_generated_GatewayService_GetResource_sync] +# [END connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py deleted file mode 100644 index bda10b9a013a..000000000000 --- 
a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PostResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PostResource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -async def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.post_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PostResource_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py deleted file mode 100644 index 2a5747eb1c12..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PostResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PostResource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -def sample_post_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.post_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PostResource_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py deleted file mode 100644 index 2dd639963534..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PutResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PutResource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -async def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceAsyncClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = await client.put_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PutResource_async] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py deleted file mode 100644 index e549ff995cd5..000000000000 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PutResource -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-gke-connect-gateway - - -# [START connectgateway_v1beta1_generated_GatewayService_PutResource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.api import httpbody_pb2 # type: ignore -from google.cloud.gkeconnect import gateway_v1beta1 - - -def sample_put_resource(): - # Create a client - client = gateway_v1beta1.GatewayServiceClient() - - # Initialize request argument(s) - request = httpbody_pb2.HttpBody( - ) - - # Make the request - response = client.put_resource(request=request) - - # Handle the response - print(response) - -# [END connectgateway_v1beta1_generated_GatewayService_PutResource_sync] diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json new file mode 100644 index 000000000000..0b1343a8f6ad --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json @@ -0,0 +1,168 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.gkeconnect.gateway.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-gke-connect-gateway", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.gkeconnect.gateway_v1.GatewayControlAsyncClient", + "shortName": "GatewayControlAsyncClient" + }, + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlAsyncClient.generate_credentials", + "method": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl.GenerateCredentials", + "service": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl", + "shortName": "GatewayControl" + }, + "shortName": "GenerateCredentials" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" + }, + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1_generated_gateway_control_generate_credentials_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "connectgateway_v1_generated_GatewayControl_GenerateCredentials_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "connectgateway_v1_generated_gateway_control_generate_credentials_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlClient", + "shortName": "GatewayControlClient" + }, + "fullName": "google.cloud.gkeconnect.gateway_v1.GatewayControlClient.generate_credentials", + "method": { + "fullName": 
"google.cloud.gkeconnect.gateway.v1.GatewayControl.GenerateCredentials", + "service": { + "fullName": "google.cloud.gkeconnect.gateway.v1.GatewayControl", + "shortName": "GatewayControl" + }, + "shortName": "GenerateCredentials" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.gkeconnect.gateway_v1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" + }, + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1_generated_gateway_control_generate_credentials_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "connectgateway_v1_generated_GatewayControl_GenerateCredentials_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "connectgateway_v1_generated_gateway_control_generate_credentials_sync.py" + } + ] +} diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json index 6586a7512b44..7ab5b8ced60e 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json +++ 
b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-connect-gateway", - "version": "0.1.0" + "version": "0.9.0" }, "snippets": [ { @@ -16,22 +16,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlAsyncClient", + "shortName": "GatewayControlAsyncClient" }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.delete_resource", + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlAsyncClient.generate_credentials", "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.DeleteResource", + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl.GenerateCredentials", "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl", + "shortName": "GatewayControl" }, - "shortName": "DeleteResource" + "shortName": "GenerateCredentials" }, "parameters": [ { "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" + "type": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest" }, { "name": "retry", @@ -46,14 +46,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "delete_resource" + "resultType": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" }, - "description": "Sample for DeleteResource", - "file": "connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py", + "description": "Sample for GenerateCredentials", + "file": 
"connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_DeleteResource_async", + "regionTag": "connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_async", "segments": [ { "end": 51, @@ -66,13 +66,13 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { "end": 45, - "start": 42, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { @@ -86,28 +86,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "connectgateway_v1beta1_generated_gateway_service_delete_resource_async.py" + "title": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlClient", + "shortName": "GatewayControlClient" }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.delete_resource", + "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayControlClient.generate_credentials", "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.DeleteResource", + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl.GenerateCredentials", "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" + "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayControl", + "shortName": "GatewayControl" }, - "shortName": "DeleteResource" + "shortName": "GenerateCredentials" }, "parameters": [ { "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" + "type": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsRequest" }, { "name": "retry", @@ -122,14 +122,14 @@ "type": "Sequence[Tuple[str, str]" } 
], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "delete_resource" + "resultType": "google.cloud.gkeconnect.gateway_v1beta1.types.GenerateCredentialsResponse", + "shortName": "generate_credentials" }, - "description": "Sample for DeleteResource", - "file": "connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py", + "description": "Sample for GenerateCredentials", + "file": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_DeleteResource_sync", + "regionTag": "connectgateway_v1beta1_generated_GatewayControl_GenerateCredentials_sync", "segments": [ { "end": 51, @@ -142,13 +142,13 @@ "type": "SHORT" }, { - "end": 41, - "start": 39, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { "end": 45, - "start": 42, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { @@ -162,619 +162,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "connectgateway_v1beta1_generated_gateway_service_delete_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.get_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.GetResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "GetResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.api.httpbody_pb2.HttpBody", - "shortName": "get_resource" - }, - "description": "Sample for GetResource", - "file": "connectgateway_v1beta1_generated_gateway_service_get_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_GetResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_get_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.get_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.GetResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "GetResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "get_resource" - }, - "description": "Sample for GetResource", - "file": "connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_GetResource_sync", - 
"segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_get_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.patch_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PatchResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PatchResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "patch_resource" - }, - "description": "Sample for PatchResource", - "file": "connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PatchResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_patch_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.patch_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PatchResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PatchResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "patch_resource" - }, - "description": "Sample for PatchResource", - "file": "connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PatchResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_patch_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.post_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PostResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PostResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "post_resource" - }, - "description": "Sample for PostResource", - "file": "connectgateway_v1beta1_generated_gateway_service_post_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PostResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_post_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.post_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PostResource", - "service": { - 
"fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PostResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "post_resource" - }, - "description": "Sample for PostResource", - "file": "connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PostResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_post_resource_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient", - "shortName": "GatewayServiceAsyncClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceAsyncClient.put_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PutResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PutResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "put_resource" - }, - "description": "Sample for PutResource", - "file": "connectgateway_v1beta1_generated_gateway_service_put_resource_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PutResource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_put_resource_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient", - "shortName": "GatewayServiceClient" - }, - "fullName": "google.cloud.gkeconnect.gateway_v1beta1.GatewayServiceClient.put_resource", - "method": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService.PutResource", - "service": { - "fullName": "google.cloud.gkeconnect.gateway.v1beta1.GatewayService", - "shortName": "GatewayService" - }, - "shortName": "PutResource" - }, - "parameters": [ - { - "name": "request", - "type": "google.api.httpbody_pb2.HttpBody" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api.httpbody_pb2.HttpBody", - "shortName": "put_resource" - }, - "description": "Sample for PutResource", - "file": "connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "connectgateway_v1beta1_generated_GatewayService_PutResource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "connectgateway_v1beta1_generated_gateway_service_put_resource_sync.py" + "title": "connectgateway_v1beta1_generated_gateway_control_generate_credentials_sync.py" } ] } diff --git a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py new file mode 100644 index 000000000000..10884865fe8a --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class gatewayCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'generate_credentials': ('name', 'force_use_agent', 'version', 'kubernetes_namespace', 'operating_system', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=gatewayCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the gateway client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py index 56dd2027e348..10884865fe8a 100644 --- a/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py +++ b/packages/google-cloud-gke-connect-gateway/scripts/fixup_gateway_v1beta1_keywords.py @@ -39,11 +39,7 @@ def partition( class gatewayCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'delete_resource': ('content_type', 'data', 'extensions', ), - 'get_resource': ('content_type', 'data', 'extensions', ), - 'patch_resource': ('content_type', 'data', 'extensions', ), - 'post_resource': ('content_type', 'data', 'extensions', ), - 'put_resource': ('content_type', 'data', 'extensions', ), + 'generate_credentials': ('name', 'force_use_agent', 'version', 'kubernetes_namespace', 'operating_system', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: 
diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py similarity index 56% rename from packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py rename to packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py index 97430148490d..67c78f2a83b7 100644 --- a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_service.py +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py @@ -22,9 +22,10 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math -from google.api import httpbody_pb2 # type: ignore from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import api_core_version, client_options from 
google.api_core import exceptions as core_exceptions @@ -33,18 +34,21 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore +from google.protobuf import json_format import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session -from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service import ( - GatewayServiceAsyncClient, - GatewayServiceClient, +from google.cloud.gkeconnect.gateway_v1.services.gateway_control import ( + GatewayControlAsyncClient, + GatewayControlClient, transports, ) +from google.cloud.gkeconnect.gateway_v1.types import control def client_cert_source_callback(): @@ -80,40 +84,40 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert GatewayServiceClient._get_default_mtls_endpoint(None) is None + assert GatewayControlClient._get_default_mtls_endpoint(None) is None assert ( - GatewayServiceClient._get_default_mtls_endpoint(api_endpoint) + GatewayControlClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + GatewayControlClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + GatewayControlClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - GatewayServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + GatewayControlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - 
GatewayServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + GatewayControlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) def test__read_environment_variables(): - assert GatewayServiceClient._read_environment_variables() == (False, "auto", None) + assert GatewayControlClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", None, @@ -123,28 +127,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - GatewayServiceClient._read_environment_variables() + GatewayControlClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", None, @@ -152,14 +156,14 @@ def test__read_environment_variables(): 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - GatewayServiceClient._read_environment_variables() + GatewayControlClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert GatewayServiceClient._read_environment_variables() == ( + assert GatewayControlClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -170,13 +174,13 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert GatewayServiceClient._get_client_cert_source(None, False) is None + assert GatewayControlClient._get_client_cert_source(None, False) is None assert ( - GatewayServiceClient._get_client_cert_source(mock_provided_cert_source, False) + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, False) is None ) assert ( - GatewayServiceClient._get_client_cert_source(mock_provided_cert_source, True) + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -188,11 +192,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - GatewayServiceClient._get_client_cert_source(None, True) + GatewayControlClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - GatewayServiceClient._get_client_cert_source( + GatewayControlClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -200,64 +204,64 @@ def test__get_client_cert_source(): @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - 
GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - default_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "auto") + GatewayControlClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "always") - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + GatewayControlClient._get_api_endpoint(None, None, default_universe, "always") + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == GatewayServiceClient.DEFAULT_MTLS_ENDPOINT + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT ) assert ( - 
GatewayServiceClient._get_api_endpoint(None, None, mock_universe, "never") + GatewayControlClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - GatewayServiceClient._get_api_endpoint(None, None, default_universe, "never") + GatewayControlClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - GatewayServiceClient._get_api_endpoint( + GatewayControlClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -271,29 +275,30 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - GatewayServiceClient._get_universe_domain( + GatewayControlClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - GatewayServiceClient._get_universe_domain(None, universe_domain_env) + GatewayControlClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - GatewayServiceClient._get_universe_domain(None, None) - == GatewayServiceClient._DEFAULT_UNIVERSE + GatewayControlClient._get_universe_domain(None, None) + == GatewayControlClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - GatewayServiceClient._get_universe_domain("", None) + GatewayControlClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
@pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -372,11 +377,12 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (GatewayServiceClient, "grpc"), - (GatewayServiceAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "rest"), ], ) -def test_gateway_service_client_from_service_account_info(client_class, transport_name): +def test_gateway_control_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" @@ -387,17 +393,22 @@ def test_gateway_service_client_from_service_account_info(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com" + ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.GatewayServiceGrpcTransport, "grpc"), - (transports.GatewayServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GatewayControlGrpcTransport, "grpc"), + (transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GatewayControlRestTransport, "rest"), ], ) -def test_gateway_service_client_service_account_always_use_jwt( +def test_gateway_control_client_service_account_always_use_jwt( transport_class, 
transport_name ): with mock.patch.object( @@ -418,11 +429,12 @@ def test_gateway_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (GatewayServiceClient, "grpc"), - (GatewayServiceAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + (GatewayControlClient, "rest"), ], ) -def test_gateway_service_client_from_service_account_file(client_class, transport_name): +def test_gateway_control_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" @@ -440,52 +452,58 @@ def test_gateway_service_client_from_service_account_file(client_class, transpor assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com" + ) -def test_gateway_service_client_get_transport_class(): - transport = GatewayServiceClient.get_transport_class() +def test_gateway_control_client_get_transport_class(): + transport = GatewayControlClient.get_transport_class() available_transports = [ - transports.GatewayServiceGrpcTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlRestTransport, ] assert transport in available_transports - transport = GatewayServiceClient.get_transport_class("grpc") - assert transport == transports.GatewayServiceGrpcTransport + transport = GatewayControlClient.get_transport_class("grpc") + assert transport == transports.GatewayControlGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, 
transports.GatewayControlGrpcTransport, "grpc"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) -def test_gateway_service_client_client_options( +def test_gateway_control_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. - with mock.patch.object(GatewayServiceClient, "get_transport_class") as gtc: + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. 
- with mock.patch.object(GatewayServiceClient, "get_transport_class") as gtc: + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -608,34 +626,36 @@ def test_gateway_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc", "true"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "true"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", "true", ), - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc", "false"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "false"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", "false", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", "true"), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", "false"), ], ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_gateway_service_client_mtls_env_auto( +def test_gateway_control_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -738,19 +758,19 @@ def test_gateway_service_client_mtls_env_auto( @pytest.mark.parametrize( - "client_class", [GatewayServiceClient, GatewayServiceAsyncClient] + "client_class", [GatewayControlClient, GatewayControlAsyncClient] ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(GatewayServiceClient), + modify_default_endpoint(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(GatewayServiceAsyncClient), + modify_default_endpoint(GatewayControlAsyncClient), ) -def test_gateway_service_client_get_mtls_endpoint_and_cert_source(client_class): +def test_gateway_control_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". @@ -842,27 +862,27 @@ def test_gateway_service_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize( - "client_class", [GatewayServiceClient, GatewayServiceAsyncClient] + "client_class", [GatewayControlClient, GatewayControlAsyncClient] ) @mock.patch.object( - GatewayServiceClient, + GatewayControlClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceClient), + modify_default_endpoint_template(GatewayControlClient), ) @mock.patch.object( - GatewayServiceAsyncClient, + GatewayControlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(GatewayServiceAsyncClient), + modify_default_endpoint_template(GatewayControlAsyncClient), ) -def test_gateway_service_client_client_api_endpoint(client_class): +def test_gateway_control_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = GatewayServiceClient._DEFAULT_UNIVERSE - default_endpoint = 
GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = GatewayServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -930,15 +950,16 @@ def test_gateway_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport, "grpc"), + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest"), ], ) -def test_gateway_service_client_client_options_scopes( +def test_gateway_control_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
@@ -967,20 +988,21 @@ def test_gateway_service_client_client_options_scopes( "client_class,transport_class,transport_name,grpc_helpers", [ ( - GatewayServiceClient, - transports.GatewayServiceGrpcTransport, + GatewayControlClient, + transports.GatewayControlGrpcTransport, "grpc", grpc_helpers, ), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), + (GatewayControlClient, transports.GatewayControlRestTransport, "rest", None), ], ) -def test_gateway_service_client_client_options_credentials_file( +def test_gateway_control_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -1004,12 +1026,12 @@ def test_gateway_service_client_client_options_credentials_file( ) -def test_gateway_service_client_client_options_from_dict(): +def test_gateway_control_client_client_options_from_dict(): with mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceGrpcTransport.__init__" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None - client = GatewayServiceClient( + client = GatewayControlClient( client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( @@ -1029,20 +1051,20 @@ def test_gateway_service_client_client_options_from_dict(): "client_class,transport_class,transport_name,grpc_helpers", [ ( - GatewayServiceClient, - transports.GatewayServiceGrpcTransport, + GatewayControlClient, + transports.GatewayControlGrpcTransport, "grpc", grpc_helpers, ), ( - GatewayServiceAsyncClient, - transports.GatewayServiceGrpcAsyncIOTransport, + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async, ), 
], ) -def test_gateway_service_client_create_channel_credentials_file( +def test_gateway_control_client_create_channel_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -1097,12 +1119,12 @@ def test_gateway_service_client_create_channel_credentials_file( @pytest.mark.parametrize( "request_type", [ - httpbody_pb2.HttpBody, + control.GenerateCredentialsRequest, dict, ], ) -def test_get_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( +def test_generate_credentials(request_type, transport: str = "grpc"): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1112,49 +1134,53 @@ def test_get_resource(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + call.return_value = control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) - response = client.get_resource(request) + response = client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() + request = control.GenerateCredentialsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" -def test_get_resource_empty_call(): +def test_generate_credentials_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource() + client.generate_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == control.GenerateCredentialsRequest() -def test_get_resource_non_empty_request_with_auto_populated_field(): +def test_generate_credentials_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1162,28 +1188,34 @@ def test_get_resource_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = httpbody_pb2.HttpBody( - content_type="content_type_value", + request = control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_resource(request=request) + client.generate_credentials(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + assert args[0] == control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", ) -def test_get_resource_use_cached_wrapped_rpc(): +def test_generate_credentials_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -1193,21 +1225,25 @@ def test_get_resource_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_resource in client._transport._wrapped_methods + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc request = {} - client.get_resource(request) + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource(request) + client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1215,37 +1251,39 @@ def test_get_resource_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_resource_empty_call_async(): +async def test_generate_credentials_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) ) - response = await client.get_resource() + response = await client.generate_credentials() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == control.GenerateCredentialsRequest() @pytest.mark.asyncio -async def test_get_resource_async_use_cached_wrapped_rpc( +async def test_generate_credentials_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1256,7 +1294,7 @@ async def test_get_resource_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_resource + client._client._transport.generate_credentials in client._client._transport._wrapped_methods ) @@ -1264,16 +1302,16 @@ async def test_get_resource_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_resource + client._client._transport.generate_credentials ] = mock_rpc request = {} - await client.get_resource(request) + await client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_resource(request) + await client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1281,10 +1319,10 @@ async def test_get_resource_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody +async def test_generate_credentials_async( + transport: str = "grpc_asyncio", request_type=control.GenerateCredentialsRequest ): - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1294,145 +1332,149 @@ async def test_get_resource_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) ) - response = await client.get_resource(request) + response = await client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() + request = control.GenerateCredentialsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" @pytest.mark.asyncio -async def test_get_resource_async_from_dict(): - await test_get_resource_async(request_type=dict) +async def test_generate_credentials_async_from_dict(): + await test_generate_credentials_async(request_type=dict) -def test_get_resource_from_dict_foreign(): - client = GatewayServiceClient( +def test_generate_credentials_field_headers(): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.get_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_post_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.post_resource(request) + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = control.GenerateCredentialsResponse() + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_post_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( +@pytest.mark.asyncio +async def test_generate_credentials_field_headers_async(): + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse() ) - client.post_resource() - call.assert_called() + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_post_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + +@pytest.mark.parametrize( + "request_type", + [ + control.GenerateCredentialsRequest, + dict, + ], +) +def test_generate_credentials_rest(request_type): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/memberships/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.post_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = control.GenerateCredentialsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_credentials(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" + -def test_post_resource_use_cached_wrapped_rpc(): +def test_generate_credentials_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1440,965 +1482,291 @@ def test_post_resource_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.post_resource in client._transport._wrapped_methods + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.post_resource] = mock_rpc + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc + request = {} - client.post_resource(request) + client.generate_credentials(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.post_resource(request) + client.generate_credentials(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_post_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_generate_credentials_rest_required_fields( + request_type=control.GenerateCredentialsRequest, +): + transport_class = transports.GatewayControlRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.post_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_credentials._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_post_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["name"] = "name_value" - # Ensure method has been cached - assert ( - 
client._client._transport.post_resource - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_credentials._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force_use_agent", + "kubernetes_namespace", + "operating_system", + "version", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.post_resource - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - request = {} - await client.post_resource(request) + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = control.GenerateCredentialsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + response_value = Response() + response_value.status_code = 200 - await client.post_resource(request) + # Convert return value to protobuf type + return_value = control.GenerateCredentialsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.generate_credentials(request) -@pytest.mark.asyncio -async def test_post_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", +def test_generate_credentials_rest_unset_required_fields(): + transport = transports.GatewayControlRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_credentials._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "forceUseAgent", + "kubernetesNamespace", + "operatingSystem", + "version", ) ) - response = await client.post_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_post_resource_async_from_dict(): - await test_post_resource_async(request_type=dict) + & set(("name",)) + ) -def test_post_resource_from_dict_foreign(): - client = GatewayServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_credentials_rest_interceptors(null_interceptor): + transport = transports.GatewayControlRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GatewayControlRestInterceptor(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.post_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = httpbody_pb2.HttpBody() - response = client.post_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } + client = GatewayControlClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GatewayControlRestInterceptor, "post_generate_credentials" + ) as post, mock.patch.object( + transports.GatewayControlRestInterceptor, "pre_generate_credentials" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = control.GenerateCredentialsRequest.pb( + control.GenerateCredentialsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = control.GenerateCredentialsResponse.to_json( + control.GenerateCredentialsResponse() + ) + + request = control.GenerateCredentialsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = control.GenerateCredentialsResponse() + + client.generate_credentials( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - call.assert_called() + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_delete_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( + +def test_generate_credentials_rest_bad_request( + transport: str = "rest", request_type=control.GenerateCredentialsRequest +): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - 
# Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/memberships/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.delete_resource(request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_credentials(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" +def test_generate_credentials_rest_error(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) -def test_delete_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + with pytest.raises(ValueError): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client.delete_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - -def test_delete_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", + # It is an error to provide an api_key and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, + transport=transport, ) - -def test_delete_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # It is an error to provide scopes and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) - client._transport._wrapped_methods[client._transport.delete_resource] = mock_rpc - request = {} - client.delete_resource(request) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - client.delete_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.delete_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_delete_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_resource - ] = mock_rpc - - request = {} - 
await client.delete_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.delete_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_delete_resource_async_from_dict(): - await test_delete_resource_async(request_type=dict) - - -def test_delete_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.delete_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_put_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -def test_put_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.put_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -def test_put_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.put_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - -def test_put_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.put_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.put_resource] = mock_rpc - request = {} - client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.put_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_put_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.put_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_put_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.put_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.put_resource - ] = mock_rpc - - request = {} - await client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.put_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_put_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.put_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_put_resource_async_from_dict(): - await test_put_resource_async(request_type=dict) - - -def test_put_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.put_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = httpbody_pb2.HttpBody() - response = client.put_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -@pytest.mark.parametrize( - "request_type", - [ - httpbody_pb2.HttpBody, - dict, - ], -) -def test_patch_resource(request_type, transport: str = "grpc"): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - response = client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -def test_patch_resource_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.patch_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -def test_patch_resource_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.patch_resource(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody( - content_type="content_type_value", - ) - - -def test_patch_resource_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.patch_resource in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.patch_resource] = mock_rpc - request = {} - client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.patch_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_patch_resource_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.patch_resource() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == httpbody_pb2.HttpBody() - - -@pytest.mark.asyncio -async def test_patch_resource_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.patch_resource - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.patch_resource - ] = mock_rpc - - request = {} - await client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.patch_resource(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_patch_resource_async( - transport: str = "grpc_asyncio", request_type=httpbody_pb2.HttpBody -): - client = GatewayServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - httpbody_pb2.HttpBody( - content_type="content_type_value", - data=b"data_blob", - ) - ) - response = await client.patch_resource(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = httpbody_pb2.HttpBody() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, httpbody_pb2.HttpBody) - assert response.content_type == "content_type_value" - assert response.data == b"data_blob" - - -@pytest.mark.asyncio -async def test_patch_resource_async_from_dict(): - await test_patch_resource_async(request_type=dict) - - -def test_patch_resource_from_dict_foreign(): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.patch_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = httpbody_pb2.HttpBody() - response = client.patch_resource( - request={ - "content_type": "content_type_value", - "data": b"data_blob", - "extensions": [ - any_pb2.Any(type_url="type.googleapis.com/google.protobuf.Empty") - ], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = GatewayServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.GatewayServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = GatewayServiceClient(transport=transport) - assert client.transport is transport +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GatewayControlClient(transport=transport) + assert client.transport is transport def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. - transport = transports.GatewayServiceGrpcTransport( + transport = transports.GatewayControlGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel - transport = transports.GatewayServiceGrpcAsyncIOTransport( + transport = transports.GatewayControlGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel @@ -2408,8 +1776,9 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + transports.GatewayControlRestTransport, ], ) def test_transport_adc(transport_class): @@ -2424,10 +1793,11 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): - transport = GatewayServiceClient.get_transport_class(transport_name)( + transport = GatewayControlClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name @@ -2435,43 +1805,37 @@ def test_transport_kind(transport_name): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, - transports.GatewayServiceGrpcTransport, + transports.GatewayControlGrpcTransport, ) -def test_gateway_service_base_transport_error(): +def test_gateway_control_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_gateway_service_base_transport(): +def test_gateway_control_base_transport(): # Instantiate the base transport. with mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport.__init__" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
- methods = ( - "get_resource", - "post_resource", - "delete_resource", - "put_resource", - "patch_resource", - ) + methods = ("generate_credentials",) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -2488,16 +1852,16 @@ def test_gateway_service_base_transport(): getattr(transport, r)() -def test_gateway_service_base_transport_with_credentials_file(): +def test_gateway_control_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport._prep_wrapped_messages" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GatewayServiceTransport( + transport = transports.GatewayControlTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2509,22 +1873,22 @@ def test_gateway_service_base_transport_with_credentials_file(): ) -def test_gateway_service_base_transport_with_adc(): +def test_gateway_control_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_service.transports.GatewayServiceTransport._prep_wrapped_messages" + "google.cloud.gkeconnect.gateway_v1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GatewayServiceTransport() + transport = transports.GatewayControlTransport() adc.assert_called_once() -def test_gateway_service_auth_adc(): +def test_gateway_control_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GatewayServiceClient() + GatewayControlClient() adc.assert_called_once_with( scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2535,11 +1899,11 @@ def test_gateway_service_auth_adc(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_auth_adc(transport_class): +def test_gateway_control_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2555,11 +1919,12 @@ def test_gateway_service_transport_auth_adc(transport_class): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + transports.GatewayControlRestTransport, ], ) -def test_gateway_service_transport_auth_gdch_credentials(transport_class): +def test_gateway_control_transport_auth_gdch_credentials(transport_class): host = "https://language.com" api_audience_tests = [None, "https://language2.com"] api_audience_expect = [host, "https://language2.com"] @@ -2577,11 +1942,11 @@ def test_gateway_service_transport_auth_gdch_credentials(transport_class): @pytest.mark.parametrize( "transport_class,grpc_helpers", [ - (transports.GatewayServiceGrpcTransport, grpc_helpers), - (transports.GatewayServiceGrpcAsyncIOTransport, grpc_helpers_async), + (transports.GatewayControlGrpcTransport, grpc_helpers), + (transports.GatewayControlGrpcAsyncIOTransport, grpc_helpers_async), ], ) -def test_gateway_service_transport_create_channel(transport_class, grpc_helpers): +def test_gateway_control_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object( @@ -2612,11 +1977,11 @@ def test_gateway_service_transport_create_channel(transport_class, grpc_helpers) @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_grpc_transport_client_cert_source_for_mtls(transport_class): +def test_gateway_control_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -2654,22 +2019,38 @@ def test_gateway_service_grpc_transport_client_cert_source_for_mtls(transport_cl ) +def test_gateway_control_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GatewayControlRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) -def test_gateway_service_host_no_port(transport_name): - client = GatewayServiceClient( +def test_gateway_control_host_no_port(transport_name): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="connectgateway.googleapis.com" ), transport=transport_name, ) - assert client.transport._host == ("connectgateway.googleapis.com:443") + assert client.transport._host == ( + "connectgateway.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com" + ) @pytest.mark.parametrize( @@ -2677,24 +2058,51 @@ def test_gateway_service_host_no_port(transport_name): [ "grpc", 
"grpc_asyncio", + "rest", ], ) -def test_gateway_service_host_with_port(transport_name): - client = GatewayServiceClient( +def test_gateway_control_host_with_port(transport_name): + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="connectgateway.googleapis.com:8000" ), transport=transport_name, ) - assert client.transport._host == ("connectgateway.googleapis.com:8000") + assert client.transport._host == ( + "connectgateway.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://connectgateway.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_gateway_control_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GatewayControlClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GatewayControlClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_credentials._session + session2 = client2.transport.generate_credentials._session + assert session1 != session2 -def test_gateway_service_grpc_transport_channel(): +def test_gateway_control_grpc_transport_channel(): channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. - transport = transports.GatewayServiceGrpcTransport( + transport = transports.GatewayControlGrpcTransport( host="squid.clam.whelk", channel=channel, ) @@ -2703,11 +2111,11 @@ def test_gateway_service_grpc_transport_channel(): assert transport._ssl_channel_credentials == None -def test_gateway_service_grpc_asyncio_transport_channel(): +def test_gateway_control_grpc_asyncio_transport_channel(): channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
- transport = transports.GatewayServiceGrpcAsyncIOTransport( + transport = transports.GatewayControlGrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, ) @@ -2721,11 +2129,11 @@ def test_gateway_service_grpc_asyncio_transport_channel(): @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_channel_mtls_with_client_cert_source( +def test_gateway_control_transport_channel_mtls_with_client_cert_source( transport_class, ): with mock.patch( @@ -2775,11 +2183,11 @@ def test_gateway_service_transport_channel_mtls_with_client_cert_source( @pytest.mark.parametrize( "transport_class", [ - transports.GatewayServiceGrpcTransport, - transports.GatewayServiceGrpcAsyncIOTransport, + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, ], ) -def test_gateway_service_transport_channel_mtls_with_adc(transport_class): +def test_gateway_control_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", @@ -2821,7 +2229,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = GatewayServiceClient.common_billing_account_path(billing_account) + actual = GatewayControlClient.common_billing_account_path(billing_account) assert expected == actual @@ -2829,10 +2237,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "clam", } - path = GatewayServiceClient.common_billing_account_path(**expected) + path = GatewayControlClient.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = GatewayServiceClient.parse_common_billing_account_path(path) + actual = GatewayControlClient.parse_common_billing_account_path(path) assert expected == actual @@ -2841,7 +2249,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = GatewayServiceClient.common_folder_path(folder) + actual = GatewayControlClient.common_folder_path(folder) assert expected == actual @@ -2849,10 +2257,10 @@ def test_parse_common_folder_path(): expected = { "folder": "octopus", } - path = GatewayServiceClient.common_folder_path(**expected) + path = GatewayControlClient.common_folder_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_folder_path(path) + actual = GatewayControlClient.parse_common_folder_path(path) assert expected == actual @@ -2861,7 +2269,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = GatewayServiceClient.common_organization_path(organization) + actual = GatewayControlClient.common_organization_path(organization) assert expected == actual @@ -2869,10 +2277,10 @@ def test_parse_common_organization_path(): expected = { "organization": "nudibranch", } - path = GatewayServiceClient.common_organization_path(**expected) + path = GatewayControlClient.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = GatewayServiceClient.parse_common_organization_path(path) + actual = GatewayControlClient.parse_common_organization_path(path) assert expected == actual @@ -2881,7 +2289,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = GatewayServiceClient.common_project_path(project) + actual = GatewayControlClient.common_project_path(project) assert expected == actual @@ -2889,10 +2297,10 @@ def test_parse_common_project_path(): expected = { "project": "mussel", } - path = GatewayServiceClient.common_project_path(**expected) + path = GatewayControlClient.common_project_path(**expected) # Check that the path construction is reversible. - actual = GatewayServiceClient.parse_common_project_path(path) + actual = GatewayControlClient.parse_common_project_path(path) assert expected == actual @@ -2903,7 +2311,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = GatewayServiceClient.common_location_path(project, location) + actual = GatewayControlClient.common_location_path(project, location) assert expected == actual @@ -2912,10 +2320,10 @@ def test_parse_common_location_path(): "project": "scallop", "location": "abalone", } - path = GatewayServiceClient.common_location_path(**expected) + path = GatewayControlClient.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = GatewayServiceClient.parse_common_location_path(path) + actual = GatewayControlClient.parse_common_location_path(path) assert expected == actual @@ -2923,18 +2331,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.GatewayServiceTransport, "_prep_wrapped_messages" + transports.GatewayControlTransport, "_prep_wrapped_messages" ) as prep: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.GatewayServiceTransport, "_prep_wrapped_messages" + transports.GatewayControlTransport, "_prep_wrapped_messages" ) as prep: - transport_class = GatewayServiceClient.get_transport_class() + transport_class = GatewayControlClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2944,7 +2352,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): - client = GatewayServiceAsyncClient( + client = GatewayControlAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) @@ -2958,11 +2366,12 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } for transport, close_name in transports.items(): - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2975,10 +2384,11 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: - client = GatewayServiceClient( + client = GatewayControlClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -2992,8 +2402,8 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (GatewayServiceClient, transports.GatewayServiceGrpcTransport), - (GatewayServiceAsyncClient, transports.GatewayServiceGrpcAsyncIOTransport), + (GatewayControlClient, transports.GatewayControlGrpcTransport), + (GatewayControlAsyncClient, transports.GatewayControlGrpcAsyncIOTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py new file mode 100644 index 000000000000..024863c9d32c --- /dev/null +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py @@ -0,0 +1,2090 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control import ( + GatewayControlAsyncClient, + GatewayControlClient, + transports, +) +from google.cloud.gkeconnect.gateway_v1beta1.types import control + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GatewayControlClient._get_default_mtls_endpoint(None) is None + assert ( + GatewayControlClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GatewayControlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert GatewayControlClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert GatewayControlClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + GatewayControlClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert 
GatewayControlClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + GatewayControlClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert GatewayControlClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert GatewayControlClient._get_client_cert_source(None, False) is None + assert ( + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + GatewayControlClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + GatewayControlClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + GatewayControlClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) 
+@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + GatewayControlClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "always") + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == GatewayControlClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + GatewayControlClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + GatewayControlClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + GatewayControlClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + GatewayControlClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + GatewayControlClient._get_universe_domain(None, None) + == GatewayControlClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + GatewayControlClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + ], +) +def test_gateway_control_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("connectgateway.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GatewayControlGrpcTransport, "grpc"), + (transports.GatewayControlGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_gateway_control_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GatewayControlClient, "grpc"), + (GatewayControlAsyncClient, "grpc_asyncio"), + ], +) +def test_gateway_control_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("connectgateway.googleapis.com:443") + + +def test_gateway_control_client_get_transport_class(): + transport = GatewayControlClient.get_transport_class() + available_transports = [ + transports.GatewayControlGrpcTransport, + ] + assert transport in available_transports + + transport = GatewayControlClient.get_transport_class("grpc") + assert transport == transports.GatewayControlGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + 
GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test_gateway_control_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GatewayControlClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc", "true"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (GatewayControlClient, 
transports.GatewayControlGrpcTransport, "grpc", "false"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_gateway_control_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is 
provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GatewayControlClient, GatewayControlAsyncClient] +) +@mock.patch.object( + GatewayControlClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GatewayControlAsyncClient), +) +def test_gateway_control_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [GatewayControlClient, GatewayControlAsyncClient] +) +@mock.patch.object( + GatewayControlClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlClient), +) +@mock.patch.object( + GatewayControlAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GatewayControlAsyncClient), +) +def test_gateway_control_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = GatewayControlClient._DEFAULT_UNIVERSE + default_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GatewayControlClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport, "grpc"), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_gateway_control_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GatewayControlClient, + transports.GatewayControlGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_gateway_control_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_gateway_control_client_client_options_from_dict(): + with mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GatewayControlClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GatewayControlClient, + transports.GatewayControlGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GatewayControlAsyncClient, + transports.GatewayControlGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_gateway_control_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "connectgateway.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="connectgateway.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + control.GenerateCredentialsRequest, + dict, + ], +) +def test_generate_credentials(request_type, transport: str = "grpc"): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and 
we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", + ) + response = client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = control.GenerateCredentialsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" + + +def test_generate_credentials_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_credentials() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control.GenerateCredentialsRequest() + + +def test_generate_credentials_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_credentials(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control.GenerateCredentialsRequest( + name="name_value", + version="version_value", + kubernetes_namespace="kubernetes_namespace_value", + ) + + +def test_generate_credentials_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.generate_credentials in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_credentials + ] = mock_rpc + request = {} + client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_credentials(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_credentials_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", + ) + ) + response = await client.generate_credentials() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == control.GenerateCredentialsRequest() + + +@pytest.mark.asyncio +async def test_generate_credentials_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_credentials + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.generate_credentials + ] = mock_rpc + + request = {} + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_credentials(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_credentials_async( + transport: str = "grpc_asyncio", request_type=control.GenerateCredentialsRequest +): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse( + kubeconfig=b"kubeconfig_blob", + endpoint="endpoint_value", + ) + ) + response = await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = control.GenerateCredentialsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, control.GenerateCredentialsResponse) + assert response.kubeconfig == b"kubeconfig_blob" + assert response.endpoint == "endpoint_value" + + +@pytest.mark.asyncio +async def test_generate_credentials_async_from_dict(): + await test_generate_credentials_async(request_type=dict) + + +def test_generate_credentials_field_headers(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = control.GenerateCredentialsResponse() + client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_credentials_field_headers_async(): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = control.GenerateCredentialsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_credentials), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + control.GenerateCredentialsResponse() + ) + await client.generate_credentials(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GatewayControlClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GatewayControlClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GatewayControlGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GatewayControlGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = GatewayControlClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GatewayControlGrpcTransport, + ) + + +def test_gateway_control_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GatewayControlTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_gateway_control_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GatewayControlTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("generate_credentials",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_gateway_control_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GatewayControlTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_gateway_control_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.gkeconnect.gateway_v1beta1.services.gateway_control.transports.GatewayControlTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GatewayControlTransport() + adc.assert_called_once() + + +def test_gateway_control_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GatewayControlClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GatewayControlGrpcTransport, grpc_helpers), + (transports.GatewayControlGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_gateway_control_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "connectgateway.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="connectgateway.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_gateway_control_host_no_port(transport_name): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="connectgateway.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("connectgateway.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_gateway_control_host_with_port(transport_name): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="connectgateway.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("connectgateway.googleapis.com:8000") + + +def test_gateway_control_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GatewayControlGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_gateway_control_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.GatewayControlGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GatewayControlGrpcTransport, + transports.GatewayControlGrpcAsyncIOTransport, + ], +) +def test_gateway_control_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GatewayControlClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = GatewayControlClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GatewayControlClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GatewayControlClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = GatewayControlClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GatewayControlClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = GatewayControlClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = GatewayControlClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = GatewayControlClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GatewayControlClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GatewayControlClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = GatewayControlClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GatewayControlClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GatewayControlTransport, "_prep_wrapped_messages" + ) as prep: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GatewayControlTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GatewayControlClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = GatewayControlAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = GatewayControlClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GatewayControlClient, transports.GatewayControlGrpcTransport), + (GatewayControlAsyncClient, transports.GatewayControlGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md index cef8c7414214..5503697c10fa 100644 --- a/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md +++ b/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## 
[1.22.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.22.0...google-cloud-recaptcha-enterprise-v1.22.1) (2024-09-03) + + +### Documentation + +* minor doc fixes ([7e40c8c](https://github.com/googleapis/google-cloud-python/commit/7e40c8c40039e0c01ef9a8bfea8804edfee48d70)) + +## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.2...google-cloud-recaptcha-enterprise-v1.22.0) (2024-08-22) + + +### Features + +* add `express_settings` to `Key` ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) +* add AddIpOverride RPC ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) + + +### Documentation + +* clarify `Event.express` field ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) +* fix billing, quota, and usecase links ([0b90478](https://github.com/googleapis/google-cloud-python/commit/0b90478bb70be96b304397fc433d2dbaf0160d30)) + ## [1.21.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-recaptcha-enterprise-v1.21.1...google-cloud-recaptcha-enterprise-v1.21.2) (2024-07-30) diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py index f01824e24d20..74085402cea4 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/__init__.py @@ -27,11 +27,14 @@ from google.cloud.recaptchaenterprise_v1.types.recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, AppleDeveloperId, 
Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -40,6 +43,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -49,6 +53,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -91,11 +96,14 @@ "RecaptchaEnterpriseServiceAsyncClient", "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", @@ -104,6 +112,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -113,6 +122,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py index 558c8aab67c5..bcb1d2f54b4a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py index 32e4af3d140c..872b50ab387a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/__init__.py @@ -25,11 +25,14 @@ from .types.recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, AppleDeveloperId, Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -38,6 +41,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -47,6 +51,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -88,11 +93,14 @@ "RecaptchaEnterpriseServiceAsyncClient", "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", @@ -101,6 +109,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -110,6 +119,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git 
a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json index dda75c44f68f..8722f66f3086 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "RecaptchaEnterpriseServiceClient", "rpcs": { + "AddIpOverride": { + "methods": [ + "add_ip_override" + ] + }, "AnnotateAssessment": { "methods": [ "annotate_assessment" @@ -110,6 +115,11 @@ "grpc-async": { "libraryClient": "RecaptchaEnterpriseServiceAsyncClient", "rpcs": { + "AddIpOverride": { + "methods": [ + "add_ip_override" + ] + }, "AnnotateAssessment": { "methods": [ "annotate_assessment" diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py index 558c8aab67c5..bcb1d2f54b4a 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.22.1" # {x-release-please-version} diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py index ca4ff9f14e83..1ae940ad5ff1 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/async_client.py @@ -344,7 +344,7 @@ async def sample_create_assessment(): message. parent (:class:`str`): Required. The name of the project in which the - assessment will be created, in the format + assessment is created, in the format ``projects/{project}``. This corresponds to the ``parent`` field @@ -471,7 +471,7 @@ async def sample_annotate_assessment(): on the ``request`` instance; if ``request`` is provided, this should not be set. annotation (:class:`google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation`): - Optional. The annotation that will be + Optional. The annotation that is assigned to the Event. This field can be left empty to provide reasons that apply to an event without concluding whether @@ -587,8 +587,8 @@ async def sample_create_key(): request (Optional[Union[google.cloud.recaptchaenterprise_v1.types.CreateKeyRequest, dict]]): The request object. The create key request message. parent (:class:`str`): - Required. The name of the project in which the key will - be created, in the format ``projects/{project}``. + Required. The name of the project in which the key is + created, in the format ``projects/{project}``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -705,8 +705,7 @@ async def sample_list_keys(): The request object. The list keys request message. parent (:class:`str`): Required. The name of the project that contains the keys - that will be listed, in the format - ``projects/{project}``. + that are listed, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1052,7 +1051,7 @@ async def sample_update_key(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. The mask to control which fields of the key get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field @@ -1302,6 +1301,129 @@ async def sample_migrate_key(): # Done; return the response. return response + async def add_ip_override( + self, + request: Optional[Union[recaptchaenterprise.AddIpOverrideRequest, dict]] = None, + *, + name: Optional[str] = None, + ip_override_data: Optional[recaptchaenterprise.IpOverrideData] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> recaptchaenterprise.AddIpOverrideResponse: + r"""Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import recaptchaenterprise_v1 + + async def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = await client.add_ip_override(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest, dict]]): + The request object. The AddIpOverride request message. + name (:class:`str`): + Required. The name of the key to which the IP override + is added, in the format + ``projects/{project}/keys/{key}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ip_override_data (:class:`google.cloud.recaptchaenterprise_v1.types.IpOverrideData`): + Required. IP override added to the + key. + + This corresponds to the ``ip_override_data`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse: + Response for AddIpOverride. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, ip_override_data]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, recaptchaenterprise.AddIpOverrideRequest): + request = recaptchaenterprise.AddIpOverrideRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if ip_override_data is not None: + request.ip_override_data = ip_override_data + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.add_ip_override + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_metrics( self, request: Optional[Union[recaptchaenterprise.GetMetricsRequest, dict]] = None, @@ -1812,7 +1934,7 @@ async def sample_update_firewall_policy(): update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. The mask to control which fields of the policy get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. 
This corresponds to the ``update_mask`` field diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py index e26f3ad6450c..bd62b5ca1e8c 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/client.py @@ -829,7 +829,7 @@ def sample_create_assessment(): message. parent (str): Required. The name of the project in which the - assessment will be created, in the format + assessment is created, in the format ``projects/{project}``. This corresponds to the ``parent`` field @@ -953,7 +953,7 @@ def sample_annotate_assessment(): on the ``request`` instance; if ``request`` is provided, this should not be set. annotation (google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation): - Optional. The annotation that will be + Optional. The annotation that is assigned to the Event. This field can be left empty to provide reasons that apply to an event without concluding whether @@ -1066,8 +1066,8 @@ def sample_create_key(): request (Union[google.cloud.recaptchaenterprise_v1.types.CreateKeyRequest, dict]): The request object. The create key request message. parent (str): - Required. The name of the project in which the key will - be created, in the format ``projects/{project}``. + Required. The name of the project in which the key is + created, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1181,8 +1181,7 @@ def sample_list_keys(): The request object. The list keys request message. parent (str): Required. 
The name of the project that contains the keys - that will be listed, in the format - ``projects/{project}``. + that are listed, in the format ``projects/{project}``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1523,7 +1522,7 @@ def sample_update_key(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the key get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. This corresponds to the ``update_mask`` field @@ -1765,6 +1764,126 @@ def sample_migrate_key(): # Done; return the response. return response + def add_ip_override( + self, + request: Optional[Union[recaptchaenterprise.AddIpOverrideRequest, dict]] = None, + *, + name: Optional[str] = None, + ip_override_data: Optional[recaptchaenterprise.IpOverrideData] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> recaptchaenterprise.AddIpOverrideResponse: + r"""Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import recaptchaenterprise_v1 + + def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = client.add_ip_override(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest, dict]): + The request object. The AddIpOverride request message. + name (str): + Required. The name of the key to which the IP override + is added, in the format + ``projects/{project}/keys/{key}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ip_override_data (google.cloud.recaptchaenterprise_v1.types.IpOverrideData): + Required. IP override added to the + key. + + This corresponds to the ``ip_override_data`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse: + Response for AddIpOverride. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, ip_override_data]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, recaptchaenterprise.AddIpOverrideRequest): + request = recaptchaenterprise.AddIpOverrideRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if ip_override_data is not None: + request.ip_override_data = ip_override_data + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_ip_override] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_metrics( self, request: Optional[Union[recaptchaenterprise.GetMetricsRequest, dict]] = None, @@ -2263,7 +2382,7 @@ def sample_update_firewall_policy(): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the policy get updated. If the - mask is not present, all fields will be + mask is not present, all fields are updated. 
This corresponds to the ``update_mask`` field diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py index 9d55defeb4e0..1d997dedd2b7 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/base.py @@ -174,6 +174,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.add_ip_override: gapic_v1.method.wrap_method( + self.add_ip_override, + default_timeout=None, + client_info=client_info, + ), self.get_metrics: gapic_v1.method.wrap_method( self.get_metrics, default_timeout=None, @@ -327,6 +332,18 @@ def migrate_key( ]: raise NotImplementedError() + @property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + Union[ + recaptchaenterprise.AddIpOverrideResponse, + Awaitable[recaptchaenterprise.AddIpOverrideResponse], + ], + ]: + raise NotImplementedError() + @property def get_metrics( self, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py index 7f19869103b9..bb598e78465f 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc.py @@ -491,6 +491,39 @@ def 
migrate_key( ) return self._stubs["migrate_key"] + @property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + recaptchaenterprise.AddIpOverrideResponse, + ]: + r"""Return a callable for the add ip override method over gRPC. + + Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error is returned. + + Returns: + Callable[[~.AddIpOverrideRequest], + ~.AddIpOverrideResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "add_ip_override" not in self._stubs: + self._stubs["add_ip_override"] = self.grpc_channel.unary_unary( + "/google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService/AddIpOverride", + request_serializer=recaptchaenterprise.AddIpOverrideRequest.serialize, + response_deserializer=recaptchaenterprise.AddIpOverrideResponse.deserialize, + ) + return self._stubs["add_ip_override"] + @property def get_metrics( self, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py index ef36498d9935..00886fe8df24 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/services/recaptcha_enterprise_service/transports/grpc_asyncio.py @@ -507,6 +507,39 @@ def migrate_key( ) return self._stubs["migrate_key"] + 
@property + def add_ip_override( + self, + ) -> Callable[ + [recaptchaenterprise.AddIpOverrideRequest], + Awaitable[recaptchaenterprise.AddIpOverrideResponse], + ]: + r"""Return a callable for the add ip override method over gRPC. + + Adds an IP override to a key. The following restrictions hold: + + - The maximum number of IP overrides per key is 100. + - For any conflict (such as IP already exists or IP part of an + existing IP range), an error is returned. + + Returns: + Callable[[~.AddIpOverrideRequest], + Awaitable[~.AddIpOverrideResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "add_ip_override" not in self._stubs: + self._stubs["add_ip_override"] = self.grpc_channel.unary_unary( + "/google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService/AddIpOverride", + request_serializer=recaptchaenterprise.AddIpOverrideRequest.serialize, + response_deserializer=recaptchaenterprise.AddIpOverrideResponse.deserialize, + ) + return self._stubs["add_ip_override"] + @property def get_metrics( self, @@ -853,6 +886,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.add_ip_override: gapic_v1.method_async.wrap_method( + self.add_ip_override, + default_timeout=None, + client_info=client_info, + ), self.get_metrics: gapic_v1.method_async.wrap_method( self.get_metrics, default_timeout=None, diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py index c035b7638c0e..230f8d821762 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py +++ 
b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/__init__.py @@ -16,11 +16,14 @@ from .recaptchaenterprise import ( AccountDefenderAssessment, AccountVerificationInfo, + AddIpOverrideRequest, + AddIpOverrideResponse, AndroidKeySettings, AnnotateAssessmentRequest, AnnotateAssessmentResponse, AppleDeveloperId, Assessment, + AssessmentEnvironment, ChallengeMetrics, CreateAssessmentRequest, CreateFirewallPolicyRequest, @@ -29,6 +32,7 @@ DeleteKeyRequest, EndpointVerificationInfo, Event, + ExpressKeySettings, FirewallAction, FirewallPolicy, FirewallPolicyAssessment, @@ -38,6 +42,7 @@ GetKeyRequest, GetMetricsRequest, IOSKeySettings, + IpOverrideData, Key, ListFirewallPoliciesRequest, ListFirewallPoliciesResponse, @@ -78,11 +83,14 @@ __all__ = ( "AccountDefenderAssessment", "AccountVerificationInfo", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "AndroidKeySettings", "AnnotateAssessmentRequest", "AnnotateAssessmentResponse", "AppleDeveloperId", "Assessment", + "AssessmentEnvironment", "ChallengeMetrics", "CreateAssessmentRequest", "CreateFirewallPolicyRequest", @@ -91,6 +99,7 @@ "DeleteKeyRequest", "EndpointVerificationInfo", "Event", + "ExpressKeySettings", "FirewallAction", "FirewallPolicy", "FirewallPolicyAssessment", @@ -100,6 +109,7 @@ "GetKeyRequest", "GetMetricsRequest", "IOSKeySettings", + "IpOverrideData", "Key", "ListFirewallPoliciesRequest", "ListFirewallPoliciesResponse", diff --git a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py index 8f4b9a655592..129df8657cdd 100644 --- a/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py +++ b/packages/google-cloud-recaptcha-enterprise/google/cloud/recaptchaenterprise_v1/types/recaptchaenterprise.py @@ -68,6 +68,7 @@ "WebKeySettings", 
"AndroidKeySettings", "IOSKeySettings", + "ExpressKeySettings", "AppleDeveloperId", "ScoreDistribution", "ScoreMetrics", @@ -81,9 +82,13 @@ "ListRelatedAccountGroupsResponse", "SearchRelatedAccountGroupMembershipsRequest", "SearchRelatedAccountGroupMembershipsResponse", + "AddIpOverrideRequest", + "AddIpOverrideResponse", "RelatedAccountGroupMembership", "RelatedAccountGroup", "WafSettings", + "AssessmentEnvironment", + "IpOverrideData", }, ) @@ -93,8 +98,8 @@ class CreateAssessmentRequest(proto.Message): Attributes: parent (str): - Required. The name of the project in which the assessment - will be created, in the format ``projects/{project}``. + Required. The name of the project in which the assessment is + created, in the format ``projects/{project}``. assessment (google.cloud.recaptchaenterprise_v1.types.Assessment): Required. The assessment details. """ @@ -277,11 +282,11 @@ class AnnotateAssessmentRequest(proto.Message): Required. The resource name of the Assessment, in the format ``projects/{project}/assessments/{assessment}``. annotation (google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Annotation): - Optional. The annotation that will be - assigned to the Event. This field can be left - empty to provide reasons that apply to an event - without concluding whether the event is - legitimate or fraudulent. + Optional. The annotation that is assigned to + the Event. This field can be left empty to + provide reasons that apply to an event without + concluding whether the event is legitimate or + fraudulent. reasons (MutableSequence[google.cloud.recaptchaenterprise_v1.types.AnnotateAssessmentRequest.Reason]): Optional. Reasons for the annotation that are assigned to the event. @@ -680,6 +685,11 @@ class Assessment(proto.Message): Output only. Assessment returned when a site key, a token, and a phone number as ``user_id`` are provided. Account defender and SMS toll fraud protection need to be enabled. 
+ assessment_environment (google.cloud.recaptchaenterprise_v1.types.AssessmentEnvironment): + Optional. The environment creating the + assessment. This describes your environment (the + system invoking CreateAssessment), NOT the + environment of your user. """ name: str = proto.Field( @@ -736,6 +746,11 @@ class Assessment(proto.Message): number=12, message="PhoneFraudAssessment", ) + assessment_environment: "AssessmentEnvironment" = proto.Field( + proto.MESSAGE, + number=14, + message="AssessmentEnvironment", + ) class Event(proto.Message): @@ -771,7 +786,7 @@ class Event(proto.Message): express (bool): Optional. Flag for a reCAPTCHA express request for an assessment without a token. If enabled, ``site_key`` must - reference a SCORE key with WAF feature set to EXPRESS. + reference an express key. requested_uri (str): Optional. The URI resource the user requested that triggered an assessment. @@ -788,15 +803,14 @@ class Event(proto.Message): firewall_policy_evaluation (bool): Optional. Flag for enabling firewall policy config assessment. If this flag is enabled, the - firewall policy will be evaluated and a - suggested firewall action will be returned in - the response. + firewall policy is evaluated and a suggested + firewall action is returned in the response. transaction_data (google.cloud.recaptchaenterprise_v1.types.TransactionData): Optional. Data describing a payment transaction to be assessed. Sending this data - enables reCAPTCHA Enterprise Fraud Prevention - and the FraudPreventionAssessment component in - the response. + enables reCAPTCHA Fraud Prevention and the + FraudPreventionAssessment component in the + response. user_info (google.cloud.recaptchaenterprise_v1.types.UserInfo): Optional. Information about the user that generates this event, when they can be @@ -1728,7 +1742,7 @@ class CreateKeyRequest(proto.Message): Attributes: parent (str): - Required. The name of the project in which the key will be + Required. 
The name of the project in which the key is created, in the format ``projects/{project}``. key (google.cloud.recaptchaenterprise_v1.types.Key): Required. Information to create a reCAPTCHA @@ -1752,7 +1766,7 @@ class ListKeysRequest(proto.Message): Attributes: parent (str): Required. The name of the project that contains the keys - that will be listed, in the format ``projects/{project}``. + that are listed, in the format ``projects/{project}``. page_size (int): Optional. The maximum number of keys to return. Default is 10. Max limit is 1000. @@ -1840,7 +1854,7 @@ class UpdateKeyRequest(proto.Message): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the key get updated. If the mask is not present, - all fields will be updated. + all fields are updated. """ key: "Key" = proto.Field( @@ -1973,7 +1987,7 @@ class UpdateFirewallPolicyRequest(proto.Message): update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The mask to control which fields of the policy get updated. If the mask is not - present, all fields will be updated. + present, all fields are updated. """ firewall_policy: "FirewallPolicy" = proto.Field( @@ -2043,12 +2057,12 @@ class MigrateKeyRequest(proto.Message): Enterprise key or migrated key behaves differently than a reCAPTCHA (non-Enterprise version) key when you reach a quota limit (see - https://cloud.google.com/recaptcha-enterprise/quotas#quota_limit). - To avoid any disruption of your usage, we check that a - billing account is present. If your usage of reCAPTCHA is - under the free quota, you can safely skip the billing check - and proceed with the migration. See - https://cloud.google.com/recaptcha-enterprise/docs/billing-information. + https://cloud.google.com/recaptcha/quotas#quota_limit). To + avoid any disruption of your usage, we check that a billing + account is present. 
If your usage of reCAPTCHA is under the + free quota, you can safely skip the billing check and + proceed with the migration. See + https://cloud.google.com/recaptcha/docs/billing-information. """ name: str = proto.Field( @@ -2086,12 +2100,12 @@ class Metrics(proto.Message): start_time (google.protobuf.timestamp_pb2.Timestamp): Inclusive start time aligned to a day (UTC). score_metrics (MutableSequence[google.cloud.recaptchaenterprise_v1.types.ScoreMetrics]): - Metrics will be continuous and in order by - dates, and in the granularity of day. All Key - types should have score-based data. + Metrics are continuous and in order by dates, + and in the granularity of day. All Key types + should have score-based data. challenge_metrics (MutableSequence[google.cloud.recaptchaenterprise_v1.types.ChallengeMetrics]): - Metrics will be continuous and in order by - dates, and in the granularity of day. Only + Metrics are continuous and in order by dates, + and in the granularity of day. Only challenge-based keys (CHECKBOX, INVISIBLE), will have challenge-based data. """ @@ -2169,10 +2183,15 @@ class Key(proto.Message): Settings for keys that can be used by iOS apps. + This field is a member of `oneof`_ ``platform_settings``. + express_settings (google.cloud.recaptchaenterprise_v1.types.ExpressKeySettings): + Settings specific to keys that can be used + for reCAPTCHA Express. + This field is a member of `oneof`_ ``platform_settings``. labels (MutableMapping[str, str]): Optional. See [Creating and managing labels] - (https://cloud.google.com/recaptcha-enterprise/docs/labels). + (https://cloud.google.com/recaptcha/docs/labels). create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp corresponding to the creation of this key. 
@@ -2209,6 +2228,12 @@ class Key(proto.Message): oneof="platform_settings", message="IOSKeySettings", ) + express_settings: "ExpressKeySettings" = proto.Field( + proto.MESSAGE, + number=11, + oneof="platform_settings", + message="ExpressKeySettings", + ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, @@ -2449,6 +2474,13 @@ class IOSKeySettings(proto.Message): ) +class ExpressKeySettings(proto.Message): + r"""Settings specific to keys that can be used for reCAPTCHA + Express. + + """ + + class AppleDeveloperId(proto.Message): r"""Contains fields that are required to perform Apple-specific integrity checks. @@ -2572,13 +2604,12 @@ class FirewallPolicyAssessment(proto.Message): Attributes: error (google.rpc.status_pb2.Status): Output only. If the processing of a policy config fails, an - error will be populated and the firewall_policy will be left - empty. + error is populated and the firewall_policy is left empty. firewall_policy (google.cloud.recaptchaenterprise_v1.types.FirewallPolicy): Output only. The policy that matched the request. If more than one policy may match, this is the first match. If no policy matches the - incoming request, the policy field will be left + incoming request, the policy field is left empty. """ @@ -3032,6 +3063,32 @@ def raw_page(self): ) +class AddIpOverrideRequest(proto.Message): + r"""The AddIpOverride request message. + + Attributes: + name (str): + Required. The name of the key to which the IP override is + added, in the format ``projects/{project}/keys/{key}``. + ip_override_data (google.cloud.recaptchaenterprise_v1.types.IpOverrideData): + Required. IP override added to the key. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_override_data: "IpOverrideData" = proto.Field( + proto.MESSAGE, + number=2, + message="IpOverrideData", + ) + + +class AddIpOverrideResponse(proto.Message): + r"""Response for AddIpOverride.""" + + class RelatedAccountGroupMembership(proto.Message): r"""A membership in a group of related accounts. @@ -3097,7 +3154,7 @@ class WafSettings(proto.Message): class WafFeature(proto.Enum): r"""Supported WAF features. For more information, see - https://cloud.google.com/recaptcha-enterprise/docs/usecase#comparison_of_features. + https://cloud.google.com/recaptcha/docs/usecase#comparison_of_features. Values: WAF_FEATURE_UNSPECIFIED (0): @@ -3151,4 +3208,81 @@ class WafService(proto.Enum): ) +class AssessmentEnvironment(proto.Message): + r"""The environment creating the assessment. This describes your + environment (the system invoking CreateAssessment), NOT the + environment of your user. + + Attributes: + client (str): + Optional. Identifies the client module + initiating the CreateAssessment request. This + can be the link to the client module's project. + Examples include: + + - + "github.com/GoogleCloudPlatform/recaptcha-enterprise-google-tag-manager" + - + "cloud.google.com/recaptcha/docs/implement-waf-akamai" + - + "cloud.google.com/recaptcha/docs/implement-waf-cloudflare" + - "wordpress.org/plugins/recaptcha-something". + version (str): + Optional. The version of the client module. + For example, "1.0.0". + """ + + client: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class IpOverrideData(proto.Message): + r"""Information about the IP or IP range override. + + Attributes: + ip (str): + Required. The IP address to override (can be + IPv4, IPv6 or CIDR). The IP override must be a + valid IPv4 or IPv6 address, or a CIDR range. The + IP override must be a public IP address. 
+ Example of IPv4: 168.192.5.6 + Example of IPv6: + 2001:0000:130F:0000:0000:09C0:876A:130B Example + of IPv4 with CIDR: 168.192.5.0/24 + Example of IPv6 with CIDR: 2001:0DB8:1234::/48 + override_type (google.cloud.recaptchaenterprise_v1.types.IpOverrideData.OverrideType): + Required. Describes the type of IP override. + """ + + class OverrideType(proto.Enum): + r"""Enum that represents the type of IP override. + + Values: + OVERRIDE_TYPE_UNSPECIFIED (0): + Default override type that indicates this + enum hasn't been specified. + ALLOW (1): + Allowlist the IP address; i.e. give a + ``risk_analysis.score`` of 0.9 for all valid assessments. + """ + OVERRIDE_TYPE_UNSPECIFIED = 0 + ALLOW = 1 + + ip: str = proto.Field( + proto.STRING, + number=1, + ) + override_type: OverrideType = proto.Field( + proto.ENUM, + number=3, + enum=OverrideType, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py new file mode 100644 index 000000000000..0431a12687a1 --- /dev/null +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddIpOverride +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-recaptcha-enterprise + + +# [START recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import recaptchaenterprise_v1 + + +async def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = await client.add_ip_override(request=request) + + # Handle the response + print(response) + +# [END recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async] diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py new file mode 100644 index 
000000000000..f5727a9c8ee8 --- /dev/null +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddIpOverride +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-recaptcha-enterprise + + +# [START recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import recaptchaenterprise_v1 + + +def sample_add_ip_override(): + # Create a client + client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient() + + # Initialize request argument(s) + ip_override_data = recaptchaenterprise_v1.IpOverrideData() + ip_override_data.ip = "ip_value" + ip_override_data.override_type = "ALLOW" + + request = recaptchaenterprise_v1.AddIpOverrideRequest( + name="name_value", + ip_override_data=ip_override_data, + ) + + # Make the request + response = client.add_ip_override(request=request) + + # Handle the response + print(response) + +# [END recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync] diff --git a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json index aa554428ea59..225f39c961dd 100644 --- a/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json +++ b/packages/google-cloud-recaptcha-enterprise/samples/generated_samples/snippet_metadata_google.cloud.recaptchaenterprise.v1.json @@ -8,9 +8,178 @@ ], "language": "PYTHON", "name": "google-cloud-recaptcha-enterprise", - "version": "0.1.0" + "version": "1.22.1" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient", + "shortName": "RecaptchaEnterpriseServiceAsyncClient" + }, + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceAsyncClient.add_ip_override", + "method": { + "fullName": 
"google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService.AddIpOverride", + "service": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService", + "shortName": "RecaptchaEnterpriseService" + }, + "shortName": "AddIpOverride" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "ip_override_data", + "type": "google.cloud.recaptchaenterprise_v1.types.IpOverrideData" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse", + "shortName": "add_ip_override" + }, + "description": "Sample for AddIpOverride", + "file": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient", + "shortName": "RecaptchaEnterpriseServiceClient" + }, + "fullName": "google.cloud.recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient.add_ip_override", + "method": { + "fullName": 
"google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService.AddIpOverride", + "service": { + "fullName": "google.cloud.recaptchaenterprise.v1.RecaptchaEnterpriseService", + "shortName": "RecaptchaEnterpriseService" + }, + "shortName": "AddIpOverride" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "ip_override_data", + "type": "google.cloud.recaptchaenterprise_v1.types.IpOverrideData" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.recaptchaenterprise_v1.types.AddIpOverrideResponse", + "shortName": "add_ip_override" + }, + "description": "Sample for AddIpOverride", + "file": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "recaptchaenterprise_v1_generated_RecaptchaEnterpriseService_AddIpOverride_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "recaptchaenterprise_v1_generated_recaptcha_enterprise_service_add_ip_override_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py b/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py index 59c889be9ff2..36e16174f78a 100644 --- 
a/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py +++ b/packages/google-cloud-recaptcha-enterprise/scripts/fixup_recaptchaenterprise_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class recaptchaenterpriseCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'add_ip_override': ('name', 'ip_override_data', ), 'annotate_assessment': ('name', 'annotation', 'reasons', 'account_id', 'hashed_account_id', 'transaction_event', ), 'create_assessment': ('parent', 'assessment', ), 'create_firewall_policy': ('parent', 'firewall_policy', ), diff --git a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py index 2134315d5a12..9230f6fa88c3 100644 --- a/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py +++ b/packages/google-cloud-recaptcha-enterprise/tests/unit/gapic/recaptchaenterprise_v1/test_recaptcha_enterprise_service.py @@ -4646,6 +4646,374 @@ async def test_migrate_key_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + recaptchaenterprise.AddIpOverrideRequest, + dict, + ], +) +def test_add_ip_override(request_type, transport: str = "grpc"): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = recaptchaenterprise.AddIpOverrideResponse() + response = client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = recaptchaenterprise.AddIpOverrideRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, recaptchaenterprise.AddIpOverrideResponse) + + +def test_add_ip_override_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.add_ip_override() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest() + + +def test_add_ip_override_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = recaptchaenterprise.AddIpOverrideRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.add_ip_override(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest( + name="name_value", + ) + + +def test_add_ip_override_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_ip_override in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_ip_override] = mock_rpc + request = {} + client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.add_ip_override(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_add_ip_override_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + response = await client.add_ip_override() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == recaptchaenterprise.AddIpOverrideRequest() + + +@pytest.mark.asyncio +async def test_add_ip_override_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.add_ip_override + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.add_ip_override + ] = mock_rpc + + request = {} + await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.add_ip_override(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_add_ip_override_async( + transport: str = "grpc_asyncio", + request_type=recaptchaenterprise.AddIpOverrideRequest, +): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + response = await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = recaptchaenterprise.AddIpOverrideRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, recaptchaenterprise.AddIpOverrideResponse) + + +@pytest.mark.asyncio +async def test_add_ip_override_async_from_dict(): + await test_add_ip_override_async(request_type=dict) + + +def test_add_ip_override_field_headers(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = recaptchaenterprise.AddIpOverrideRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_add_ip_override_field_headers_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = recaptchaenterprise.AddIpOverrideRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + await client.add_ip_override(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_add_ip_override_flattened(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = recaptchaenterprise.AddIpOverrideResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.add_ip_override( + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].ip_override_data + mock_val = recaptchaenterprise.IpOverrideData(ip="ip_value") + assert arg == mock_val + + +def test_add_ip_override_flattened_error(): + client = RecaptchaEnterpriseServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_ip_override( + recaptchaenterprise.AddIpOverrideRequest(), + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + +@pytest.mark.asyncio +async def test_add_ip_override_flattened_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.add_ip_override), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = recaptchaenterprise.AddIpOverrideResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + recaptchaenterprise.AddIpOverrideResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.add_ip_override( + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].ip_override_data + mock_val = recaptchaenterprise.IpOverrideData(ip="ip_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_add_ip_override_flattened_error_async(): + client = RecaptchaEnterpriseServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.add_ip_override( + recaptchaenterprise.AddIpOverrideRequest(), + name="name_value", + ip_override_data=recaptchaenterprise.IpOverrideData(ip="ip_value"), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9582,6 +9950,7 @@ def test_recaptcha_enterprise_service_base_transport(): "update_key", "delete_key", "migrate_key", + "add_ip_override", "get_metrics", "create_firewall_policy", "list_firewall_policies", diff --git a/packages/google-cloud-service-management/CHANGELOG.md b/packages/google-cloud-service-management/CHANGELOG.md index c62d63a5255d..40b0c2033509 100644 --- a/packages/google-cloud-service-management/CHANGELOG.md +++ b/packages/google-cloud-service-management/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-service-management-v1.8.5...google-cloud-service-management-v1.9.0) (2024-09-03) + + +### Features + +* Support local binding for variables with keyword name collision ([c54700d](https://github.com/googleapis/google-cloud-python/commit/c54700d3e11e59eb5fae01fda25dbf3a9acbe382)) + ## 
[1.8.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-service-management-v1.8.4...google-cloud-service-management-v1.8.5) (2024-07-30) diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py index 558c8aab67c5..1c08bcbd1569 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py index 558c8aab67c5..1c08bcbd1569 100644 --- a/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py +++ b/packages/google-cloud-service-management/google/cloud/servicemanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json index 8b514f320127..f108a0c7b5e1 100644 --- a/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json +++ b/packages/google-cloud-service-management/samples/generated_samples/snippet_metadata_google.api.servicemanagement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-service-management", - "version": "0.1.0" + "version": "1.9.0" }, "snippets": [ { diff --git a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py index 12b5f383294f..10c05a61ed21 100644 --- a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py +++ b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -9286,7 +9286,10 @@ def test_create_service_config_rest(request_type): }, "cpp_settings": {"common": {}}, "php_settings": {"common": {}}, - "python_settings": {"common": {}}, + "python_settings": { + "common": {}, + "experimental_features": {"rest_async_io_enabled": True}, + }, "node_settings": {"common": {}}, "dotnet_settings": { "common": {}, diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md index 14211a0b99aa..0ebabf8433ec 100644 --- a/packages/google-cloud-texttospeech/CHANGELOG.md +++ b/packages/google-cloud-texttospeech/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-texttospeech/#history +## 
[2.17.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.1...google-cloud-texttospeech-v2.17.2) (2024-08-26) + + +### Documentation + +* [google-cloud-texttospeech] update Long Audio capabilities to include SSML ([#13031](https://github.com/googleapis/google-cloud-python/issues/13031)) ([46ef325](https://github.com/googleapis/google-cloud-python/commit/46ef3250151c5939100ab4a1dcda6ccf68ea9e4c)) + ## [2.17.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.17.0...google-cloud-texttospeech-v2.17.1) (2024-08-20) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py index 6fecc94eb049..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py index 6fecc94eb049..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py index 6fecc94eb049..b2378ad73fca 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "2.17.2" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py index 7617ef4b5f77..67b70c095524 100644 --- a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py @@ -42,8 +42,7 @@ class SynthesizeLongAudioRequest(proto.Message): ``projects/*/locations/*``. input (google.cloud.texttospeech_v1beta1.types.SynthesisInput): Required. The Synthesizer requires either - plain text or SSML as input. While Long Audio is - in preview, SSML is temporarily unsupported. + plain text or SSML as input. audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig): Required. The configuration of the synthesized audio. 
diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json index ac836bbdf339..bf52a9d09886 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.1" + "version": "2.17.2" }, "snippets": [ { diff --git a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json index c0a9e929493b..f3e0f230302f 100644 --- a/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json +++ b/packages/google-cloud-texttospeech/samples/generated_samples/snippet_metadata_google.cloud.texttospeech.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-texttospeech", - "version": "2.17.1" + "version": "2.17.2" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/CHANGELOG.md b/packages/google-cloud-visionai/CHANGELOG.md index 55e12017ac55..ef410baf042c 100644 --- a/packages/google-cloud-visionai/CHANGELOG.md +++ b/packages/google-cloud-visionai/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-visionai-v0.1.2...google-cloud-visionai-v0.1.3) (2024-09-03) + + +### Features + +* add BatchOperationStatus to import metadata ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* request client libraries for new languages 
([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) + + +### Documentation + +* A comment for enum value `FAILED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for enum value `IN_PROGRESS` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for enum value `SUCCEEDED` in enum `State` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for field `relevance` in message `.google.cloud.visionai.v1.SearchResultItem` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) +* A comment for method `ClipAsset` in service `Warehouse` is changed ([0321915](https://github.com/googleapis/google-cloud-python/commit/0321915e31c12f24e96b778b5b3814507ff547d6)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-visionai-v0.1.1...google-cloud-visionai-v0.1.2) (2024-07-30) diff --git a/packages/google-cloud-visionai/google/cloud/visionai/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai/__init__.py index ccaffd1a09fa..1849c4efa2ad 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/__init__.py @@ -276,6 +276,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -640,6 +641,7 @@ "AnnotationValue", "Asset", "AssetSource", + "BatchOperationStatus", "BoolValue", "CircleArea", "ClipAssetRequest", diff --git a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- 
a/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py index 82e8e3764093..732d7a958c52 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/__init__.py @@ -250,6 +250,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -432,6 +433,7 @@ "AssetSource", "AttributeValue", "AutoscalingMetricSpec", + "BatchOperationStatus", "BatchRunProcessRequest", "BatchRunProcessResponse", "BigQueryConfig", diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py index e33e38726d0e..85335ef56434 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/async_client.py @@ -4057,11 +4057,12 @@ async def clip_asset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> warehouse.ClipAssetResponse: - r"""Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + r"""Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. .. code-block:: python diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py index a7b0f4fa4c78..a8da521a4414 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/client.py @@ -4562,11 +4562,12 @@ def clip_asset( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> warehouse.ClipAssetResponse: - r"""Generates clips for downloading. 
The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + r"""Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. .. code-block:: python diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py index df1b049eebc6..372e53e0beca 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc.py @@ -1142,11 +1142,12 @@ def clip_asset( ) -> Callable[[warehouse.ClipAssetRequest], warehouse.ClipAssetResponse]: r"""Return a callable for the clip asset method over gRPC. - Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. 
Returns: Callable[[~.ClipAssetRequest], diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py index d7eef7e7e00e..dccfe2920d8b 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/services/warehouse/transports/grpc_asyncio.py @@ -1172,11 +1172,12 @@ def clip_asset( ) -> Callable[[warehouse.ClipAssetRequest], Awaitable[warehouse.ClipAssetResponse]]: r"""Return a callable for the clip asset method over gRPC. - Generates clips for downloading. The api takes in a time range, - and generates a clip of the first content available after - start_time and before end_time, which may overflow beyond these - bounds. Returned clips are truncated if the total size of the - clips are larger than 100MB. + Supported by STREAM_VIDEO corpus type. Generates clips for + downloading. The api takes in a time range, and generates a clip + of the first content available after start_time and before + end_time, which may overflow beyond these bounds. Returned clips + are truncated if the total size of the clips are larger than + 100MB. 
Returns: Callable[[~.ClipAssetRequest], diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py index df36023a838c..08ebd9fab538 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/__init__.py @@ -230,6 +230,7 @@ AnnotationValue, Asset, AssetSource, + BatchOperationStatus, BoolValue, CircleArea, ClipAssetRequest, @@ -582,6 +583,7 @@ "AnnotationValue", "Asset", "AssetSource", + "BatchOperationStatus", "BoolValue", "CircleArea", "ClipAssetRequest", diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py index 92451e6b9b0e..a236ffe33e3e 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1/types/warehouse.py @@ -120,6 +120,7 @@ "DeleteAnnotationRequest", "ImportAssetsRequest", "ImportAssetsMetadata", + "BatchOperationStatus", "ImportAssetsResponse", "CreateSearchConfigRequest", "UpdateSearchConfigRequest", @@ -607,11 +608,11 @@ class State(proto.Enum): The default process state should never happen. IN_PROGRESS (1): - The feature is in progress. + The ml model analysis is in progress. SUCCEEDED (2): - The process is successfully done. + The ml model analysis is successfully done. FAILED (3): - The process failed. + The ml model analysis failed. """ STATE_UNSPECIFIED = 0 IN_PROGRESS = 1 @@ -1629,6 +1630,22 @@ class Index(proto.Message): Index of VIDEO_ON_DEMAND corpus can have at most one deployed index. Index of IMAGE corpus can have multiple deployed indexes. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. 
It is set + to true when the index is a valid zone separated + index and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. It is set + to true when the index is a valid zone isolated + index and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class State(proto.Enum): @@ -1687,6 +1704,16 @@ class State(proto.Enum): number=8, message="DeployedIndexReference", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=12, + optional=True, + ) class DeployedIndexReference(proto.Message): @@ -1709,6 +1736,9 @@ class Corpus(proto.Message): Within a corpus, media shares the same data schema. Search is also restricted within a single corpus. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the corpus. Format: @@ -1729,6 +1759,22 @@ class Corpus(proto.Message): search_capability_setting (google.cloud.visionai_v1.types.SearchCapabilitySetting): Default search capability setting on corpus level. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. It is set + to true when the corpus is a valid zone + separated corpus and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. It is set + to true when the corpus is a valid zone isolated + corpus and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. 
""" class Type(proto.Enum): @@ -1778,6 +1824,16 @@ class Type(proto.Enum): number=8, message="SearchCapabilitySetting", ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=12, + optional=True, + ) class GetCorpusRequest(proto.Message): @@ -2874,6 +2930,11 @@ class ImportAssetsMetadata(proto.Message): Attributes: metadata (google.cloud.visionai_v1.types.OperationMetadata): The metadata of the operation. + status (google.cloud.visionai_v1.types.BatchOperationStatus): + The importing status including partial + failures, if the implementation can provide such + information during the progress of the + ImportAssets. """ metadata: common.OperationMetadata = proto.Field( @@ -2881,6 +2942,33 @@ class ImportAssetsMetadata(proto.Message): number=1, message=common.OperationMetadata, ) + status: "BatchOperationStatus" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchOperationStatus", + ) + + +class BatchOperationStatus(proto.Message): + r"""The batch operation status. + + Attributes: + success_count (int): + The count of assets (together with their + annotations if any) successfully ingested. + failure_count (int): + The count of assets failed to ingested; it + might be due to the annotation ingestion error. + """ + + success_count: int = proto.Field( + proto.INT32, + number=1, + ) + failure_count: int = proto.Field( + proto.INT32, + number=2, + ) class ImportAssetsResponse(proto.Message): @@ -3084,6 +3172,9 @@ class IndexEndpoint(proto.Message): r"""Message representing IndexEndpoint resource. Indexes are deployed into it. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. Resource name of the IndexEndpoint. Format: @@ -3125,6 +3216,22 @@ class IndexEndpoint(proto.Message): Output only. Create timestamp. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
Update timestamp. + satisfies_pzs (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Separation + enabled via an Org Policy constraint. It is set + to true when the index endpoint is a valid zone + separated index endpoint and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Output only. This boolean field is only set + for projects that have Physical Zone Isolation + enabled via an Org Policy constraint. It is set + to true when the index endpoint is a valid zone + isolated index endpoint and false if it isn't. + + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class State(proto.Enum): @@ -3185,6 +3292,16 @@ class State(proto.Enum): number=8, message=timestamp_pb2.Timestamp, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) class CreateIndexEndpointRequest(proto.Message): @@ -4639,11 +4756,13 @@ class SearchResultItem(proto.Message): segment (google.cloud.visionai_v1.types.Partition.TemporalPartition): The matched asset segment. relevance (float): - Relevance of this ``SearchResultItem`` to user search - request. Currently available only in Image Warehouse, and by - default represents cosine similarity. In the future can be - other measures such as "dot product" or "topicality" - requested in the search request. + Available to IMAGE corpus types. Relevance of this + ``SearchResultItem`` to user search query (text query or + image query). By default this represents cosine similarity + between the query and the retrieved media content. The value + is in the range of [-1, 1]. Note that search ranking is not + only decided by this relevance score, but also other factors + such as the match of annotations. 
requested_annotations (MutableSequence[google.cloud.visionai_v1.types.Annotation]): Search result annotations specified by result_annotation_keys in search request. diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py index 558c8aab67c5..114e40645800 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.3" # {x-release-please-version} diff --git a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py index fd8a8b002e97..1710e7124467 100644 --- a/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py +++ b/packages/google-cloud-visionai/google/cloud/visionai_v1alpha1/types/platform.py @@ -3253,7 +3253,8 @@ class VertexCustomConfig(proto.Message): 'instanceId': STRING; 'node': STRING; 'processor': STRING; - } + + } """ max_prediction_fps: int = proto.Field( diff --git a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json index db06e43868dd..37adaa87e6b4 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { diff --git 
a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json index f7d1e3f46ede..26bf9524f8af 100644 --- a/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json +++ b/packages/google-cloud-visionai/samples/generated_samples/snippet_metadata_google.cloud.visionai.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-visionai", - "version": "0.1.0" + "version": "0.1.3" }, "snippets": [ { diff --git a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py index 0b6e0f0a95cc..4cfebb38f721 100644 --- a/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py +++ b/packages/google-cloud-visionai/tests/unit/gapic/visionai_v1/test_warehouse.py @@ -5976,6 +5976,8 @@ def test_get_index(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, entire_corpus=True, ) response = client.get_index(request) @@ -5992,6 +5994,8 @@ def test_get_index(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_empty_call(): @@ -6094,6 +6098,8 @@ async def test_get_index_empty_call_async(): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index() @@ -6164,6 +6170,8 @@ async def test_get_index_async( display_name="display_name_value", 
description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index(request) @@ -6180,6 +6188,8 @@ async def test_get_index_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -7656,6 +7666,8 @@ def test_get_corpus(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_corpus(request) @@ -7671,6 +7683,8 @@ def test_get_corpus(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_corpus_empty_call(): @@ -7773,6 +7787,8 @@ async def test_get_corpus_empty_call_async(): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_corpus() @@ -7843,6 +7859,8 @@ async def test_get_corpus_async( display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_corpus(request) @@ -7859,6 +7877,8 @@ async def test_get_corpus_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -8030,6 +8050,8 @@ def 
test_update_corpus(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.update_corpus(request) @@ -8045,6 +8067,8 @@ def test_update_corpus(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_update_corpus_empty_call(): @@ -8143,6 +8167,8 @@ async def test_update_corpus_empty_call_async(): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.update_corpus() @@ -8215,6 +8241,8 @@ async def test_update_corpus_async( display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.update_corpus(request) @@ -8231,6 +8259,8 @@ async def test_update_corpus_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -20567,6 +20597,8 @@ def test_get_index_endpoint(request_type, transport: str = "grpc"): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_index_endpoint(request) @@ -20582,6 +20614,8 @@ def test_get_index_endpoint(request_type, transport: str = "grpc"): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == 
warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_endpoint_empty_call(): @@ -20694,6 +20728,8 @@ async def test_get_index_endpoint_empty_call_async(): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index_endpoint() @@ -20768,6 +20804,8 @@ async def test_get_index_endpoint_async( display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_index_endpoint(request) @@ -20784,6 +20822,8 @@ async def test_get_index_endpoint_async( assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -29750,6 +29790,8 @@ def test_create_index_rest(request_type): "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "deployed_indexes": [{"index_endpoint": "index_endpoint_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -30141,6 +30183,8 @@ def test_update_index_rest(request_type): "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "deployed_indexes": [{"index_endpoint": "index_endpoint_value"}], + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -30530,6 +30574,8 @@ def test_get_index_rest(request_type): display_name="display_name_value", description="description_value", state=warehouse.Index.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, entire_corpus=True, ) @@ -30550,6 +30596,8 @@ def test_get_index_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.Index.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_rest_use_cached_wrapped_rpc(): @@ -31503,6 +31551,8 @@ def test_create_corpus_rest(request_type): "default_ttl": {"seconds": 751, "nanos": 543}, "type_": 1, "search_capability_setting": {"search_capabilities": [{"type_": 1}]}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -31884,6 +31934,8 @@ def test_get_corpus_rest(request_type): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -31903,6 +31955,8 @@ def test_get_corpus_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_corpus_rest_use_cached_wrapped_rpc(): @@ -32183,6 +32237,8 @@ def test_update_corpus_rest(request_type): "default_ttl": {"seconds": 751, "nanos": 543}, "type_": 1, "search_capability_setting": {"search_capabilities": [{"type_": 1}]}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -32261,6 +32317,8 @@ def get_message_fields(field): display_name="display_name_value", description="description_value", type_=warehouse.Corpus.Type.STREAM_VIDEO, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -32280,6 +32338,8 @@ def get_message_fields(field): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.type_ == warehouse.Corpus.Type.STREAM_VIDEO + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_update_corpus_rest_use_cached_wrapped_rpc(): @@ -41934,6 +41994,8 @@ def test_create_index_endpoint_rest(request_type): "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -42329,6 +42391,8 @@ def test_get_index_endpoint_rest(request_type): display_name="display_name_value", description="description_value", state=warehouse.IndexEndpoint.State.CREATING, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -42348,6 +42412,8 @@ def test_get_index_endpoint_rest(request_type): assert response.display_name == "display_name_value" assert response.description == "description_value" assert response.state == warehouse.IndexEndpoint.State.CREATING + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_index_endpoint_rest_use_cached_wrapped_rpc(): @@ -43024,6 +43090,8 @@ def test_update_index_endpoint_rest(request_type): "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may 
differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-shopping-css/CHANGELOG.md b/packages/google-shopping-css/CHANGELOG.md index e18689dcec95..9150653216a2 100644 --- a/packages/google-shopping-css/CHANGELOG.md +++ b/packages/google-shopping-css/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.7...google-shopping-css-v0.1.8) (2024-08-22) + + +### Documentation + +* [google-shopping-css] update `Certification` field descriptions ([#13027](https://github.com/googleapis/google-cloud-python/issues/13027)) ([70e2dd5](https://github.com/googleapis/google-cloud-python/commit/70e2dd5f024dd5c94a5e02b442bbab7e6e5f38fe)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-shopping-css-v0.1.6...google-shopping-css-v0.1.7) (2024-07-30) diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-shopping-css/google/shopping/css/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py index cec3dd7281db..ca3f7734688f 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py +++ b/packages/google-shopping-css/google/shopping/css_v1/types/css_product_common.py @@ -494,15 +494,27 @@ class Attributes(proto.Message): class Certification(proto.Message): - r"""The certification for the product. + r"""The certification for the product. Use the this attribute to + describe certifications, such as energy efficiency ratings, + associated with a product. Attributes: name (str): - Name of the certification. + The name of the certification. At this time, + the most common value is "EPREL", which + represents energy efficiency certifications in + the EU European Registry for Energy Labeling + (EPREL) database. authority (str): - Name of the certification body. + The authority or certification body responsible for issuing + the certification. At this time, the most common value is + "EC" or “European_Commission” for energy labels in the EU. code (str): - A unique code to identify the certification. + The code of the certification. For example, + for the EPREL certificate with the link + https://eprel.ec.europa.eu/screen/product/dishwashers2019/123456 + the code is 123456. The code is required for + European Energy Labels. 
""" name: str = proto.Field( diff --git a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json index 5b682921bee5..301d93f4bbc4 100644 --- a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json +++ b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-css", - "version": "0.1.0" + "version": "0.1.8" }, "snippets": [ { diff --git a/release-please-config.json b/release-please-config.json index fcf90e2a8f7e..ea9a89e5e9fc 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -1542,7 +1542,13 @@ "component": "google-cloud-gke-connect-gateway", "extra-files": [ "google/cloud/gkeconnect/gateway/gapic_version.py", + "google/cloud/gkeconnect/gateway_v1/gapic_version.py", "google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json", + "type": "json" + }, { "jsonpath": "$.clientLibrary.version", "path": "samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json", diff --git a/renovate.json b/renovate.json index a35fc4d36c4e..789608ba73c9 100644 --- a/renovate.json +++ b/renovate.json @@ -6,6 +6,7 @@ "schedule:weekly" ], "ignorePaths": [ - ".kokoro/requirements.txt" + ".kokoro/requirements.txt", + ".kokoro/requirements-aoss.txt" ] }