diff --git a/.librarian/config.yaml b/.librarian/config.yaml index 4e6a62c4c085..c645e08c881e 100644 --- a/.librarian/config.yaml +++ b/.librarian/config.yaml @@ -1,39 +1,558 @@ -global_files_allowlist: - # Allow the container to read and write the root `CHANGELOG.md` - # file during the `release` step to update the latest client library - # versions which are hardcoded in the file. - - path: "CHANGELOG.md" - permissions: "read-write" +# This file is being migrated to librarian@latest, and is no longer maintained by hand. +global_files_allowlist: + - path: CHANGELOG.md + permissions: read-write libraries: -# libraries have "release_blocked: true" so that releases are -# explicitly initiated. -# TODO(https://github.com/googleapis/google-cloud-python/issues/16180): -# `google-django-spanner` is blocked until the presubmits are green. - - id: "google-django-spanner" + - id: google-django-spanner release_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16487): -# Allow releases for google-cloud-storage once this bug is fixed. - - id: "google-cloud-storage" + - generate_blocked: true + id: google-cloud-storage release_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16494): -# Allow generation for google-cloud-bigtable once this bug is fixed. - - id: "google-cloud-bigtable" - generate_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16489): -# Allow releases for bigframes once the bug above is fixed. - - id: "bigframes" + - generate_blocked: true + id: google-cloud-bigtable + - generate_blocked: true + id: bigframes release_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16506): -# Allow generation/release for google-cloud-firestore once this bug is fixed. 
- - id: "google-cloud-firestore" - generate_blocked: true + - generate_blocked: true + id: google-cloud-firestore release_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16165): -# Allow generation for google-cloud-dialogflow once this bug is fixed. - - id: "google-cloud-dialogflow" - generate_blocked: true -# TODO(https://github.com/googleapis/google-cloud-python/issues/16520): -# Allow release for google-crc32c once this bug is fixed. - - id: "google-crc32c" + - generate_blocked: true + id: google-cloud-dialogflow + - generate_blocked: true + id: google-crc32c release_blocked: true + - generate_blocked: true + id: bigquery-magics + - generate_blocked: true + id: db-dtypes + - generate_blocked: true + id: django-google-spanner + - generate_blocked: true + id: gapic-generator + - generate_blocked: true + id: gcp-sphinx-docfx-yaml + - generate_blocked: true + id: google-ads-admanager + - generate_blocked: true + id: google-ads-datamanager + - generate_blocked: true + id: google-ads-marketingplatform-admin + - generate_blocked: true + id: google-ai-generativelanguage + - generate_blocked: true + id: google-analytics-admin + - generate_blocked: true + id: google-analytics-data + - generate_blocked: true + id: google-api-core + - generate_blocked: true + id: google-apps-card + - generate_blocked: true + id: google-apps-chat + - generate_blocked: true + id: google-apps-events-subscriptions + - generate_blocked: true + id: google-apps-meet + - generate_blocked: true + id: google-apps-script-type + - generate_blocked: true + id: google-area120-tables + - generate_blocked: true + id: google-auth + - generate_blocked: true + id: google-auth-httplib2 + - generate_blocked: true + id: google-auth-oauthlib + - generate_blocked: true + id: google-cloud-access-approval + - generate_blocked: true + id: google-cloud-access-context-manager + - generate_blocked: true + id: google-cloud-advisorynotifications + - generate_blocked: true + id: 
google-cloud-alloydb + - generate_blocked: true + id: google-cloud-alloydb-connectors + - generate_blocked: true + id: google-cloud-api-gateway + - generate_blocked: true + id: google-cloud-api-keys + - generate_blocked: true + id: google-cloud-apigee-connect + - generate_blocked: true + id: google-cloud-apigee-registry + - generate_blocked: true + id: google-cloud-apihub + - generate_blocked: true + id: google-cloud-apiregistry + - generate_blocked: true + id: google-cloud-appengine-admin + - generate_blocked: true + id: google-cloud-appengine-logging + - generate_blocked: true + id: google-cloud-apphub + - generate_blocked: true + id: google-cloud-artifact-registry + - generate_blocked: true + id: google-cloud-asset + - generate_blocked: true + id: google-cloud-assured-workloads + - generate_blocked: true + id: google-cloud-audit-log + - generate_blocked: true + id: google-cloud-auditmanager + - generate_blocked: true + id: google-cloud-automl + - generate_blocked: true + id: google-cloud-backupdr + - generate_blocked: true + id: google-cloud-bare-metal-solution + - generate_blocked: true + id: google-cloud-batch + - generate_blocked: true + id: google-cloud-beyondcorp-appconnections + - generate_blocked: true + id: google-cloud-beyondcorp-appconnectors + - generate_blocked: true + id: google-cloud-beyondcorp-appgateways + - generate_blocked: true + id: google-cloud-beyondcorp-clientconnectorservices + - generate_blocked: true + id: google-cloud-beyondcorp-clientgateways + - generate_blocked: true + id: google-cloud-biglake + - generate_blocked: true + id: google-cloud-biglake-hive + - generate_blocked: true + id: google-cloud-bigquery + - generate_blocked: true + id: google-cloud-bigquery-analyticshub + - generate_blocked: true + id: google-cloud-bigquery-biglake + - generate_blocked: true + id: google-cloud-bigquery-connection + - generate_blocked: true + id: google-cloud-bigquery-data-exchange + - generate_blocked: true + id: google-cloud-bigquery-datapolicies 
+ - generate_blocked: true + id: google-cloud-bigquery-datatransfer + - generate_blocked: true + id: google-cloud-bigquery-logging + - generate_blocked: true + id: google-cloud-bigquery-migration + - generate_blocked: true + id: google-cloud-bigquery-reservation + - generate_blocked: true + id: google-cloud-bigquery-storage + - generate_blocked: true + id: google-cloud-billing + - generate_blocked: true + id: google-cloud-billing-budgets + - generate_blocked: true + id: google-cloud-binary-authorization + - generate_blocked: true + id: google-cloud-build + - generate_blocked: true + id: google-cloud-capacityplanner + - generate_blocked: true + id: google-cloud-certificate-manager + - generate_blocked: true + id: google-cloud-ces + - generate_blocked: true + id: google-cloud-channel + - generate_blocked: true + id: google-cloud-chronicle + - generate_blocked: true + id: google-cloud-cloudcontrolspartner + - generate_blocked: true + id: google-cloud-cloudsecuritycompliance + - generate_blocked: true + id: google-cloud-commerce-consumer-procurement + - generate_blocked: true + id: google-cloud-common + - generate_blocked: true + id: google-cloud-compute + - generate_blocked: true + id: google-cloud-compute-v1beta + - generate_blocked: true + id: google-cloud-confidentialcomputing + - generate_blocked: true + id: google-cloud-config + - generate_blocked: true + id: google-cloud-configdelivery + - generate_blocked: true + id: google-cloud-contact-center-insights + - generate_blocked: true + id: google-cloud-container + - generate_blocked: true + id: google-cloud-containeranalysis + - generate_blocked: true + id: google-cloud-contentwarehouse + - generate_blocked: true + id: google-cloud-core + - generate_blocked: true + id: google-cloud-data-fusion + - generate_blocked: true + id: google-cloud-data-qna + - generate_blocked: true + id: google-cloud-databasecenter + - generate_blocked: true + id: google-cloud-datacatalog + - generate_blocked: true + id: 
google-cloud-datacatalog-lineage + - generate_blocked: true + id: google-cloud-datacatalog-lineage-configmanagement + - generate_blocked: true + id: google-cloud-dataflow-client + - generate_blocked: true + id: google-cloud-dataform + - generate_blocked: true + id: google-cloud-datalabeling + - generate_blocked: true + id: google-cloud-dataplex + - generate_blocked: true + id: google-cloud-dataproc + - generate_blocked: true + id: google-cloud-dataproc-metastore + - generate_blocked: true + id: google-cloud-datastore + - generate_blocked: true + id: google-cloud-datastream + - generate_blocked: true + id: google-cloud-deploy + - generate_blocked: true + id: google-cloud-developerconnect + - generate_blocked: true + id: google-cloud-devicestreaming + - generate_blocked: true + id: google-cloud-dialogflow-cx + - generate_blocked: true + id: google-cloud-discoveryengine + - generate_blocked: true + id: google-cloud-dlp + - generate_blocked: true + id: google-cloud-dms + - generate_blocked: true + id: google-cloud-dns + - generate_blocked: true + id: google-cloud-documentai + - generate_blocked: true + id: google-cloud-documentai-toolbox + - generate_blocked: true + id: google-cloud-domains + - generate_blocked: true + id: google-cloud-edgecontainer + - generate_blocked: true + id: google-cloud-edgenetwork + - generate_blocked: true + id: google-cloud-enterpriseknowledgegraph + - generate_blocked: true + id: google-cloud-error-reporting + - generate_blocked: true + id: google-cloud-essential-contacts + - generate_blocked: true + id: google-cloud-eventarc + - generate_blocked: true + id: google-cloud-eventarc-publishing + - generate_blocked: true + id: google-cloud-filestore + - generate_blocked: true + id: google-cloud-financialservices + - generate_blocked: true + id: google-cloud-functions + - generate_blocked: true + id: google-cloud-gdchardwaremanagement + - generate_blocked: true + id: google-cloud-geminidataanalytics + - generate_blocked: true + id: 
google-cloud-gke-backup + - generate_blocked: true + id: google-cloud-gke-connect-gateway + - generate_blocked: true + id: google-cloud-gke-hub + - generate_blocked: true + id: google-cloud-gke-multicloud + - generate_blocked: true + id: google-cloud-gkerecommender + - generate_blocked: true + id: google-cloud-gsuiteaddons + - generate_blocked: true + id: google-cloud-hypercomputecluster + - generate_blocked: true + id: google-cloud-iam + - generate_blocked: true + id: google-cloud-iam-logging + - generate_blocked: true + id: google-cloud-iap + - generate_blocked: true + id: google-cloud-ids + - generate_blocked: true + id: google-cloud-kms + - generate_blocked: true + id: google-cloud-kms-inventory + - generate_blocked: true + id: google-cloud-language + - generate_blocked: true + id: google-cloud-licensemanager + - generate_blocked: true + id: google-cloud-life-sciences + - generate_blocked: true + id: google-cloud-locationfinder + - generate_blocked: true + id: google-cloud-logging + - generate_blocked: true + id: google-cloud-lustre + - generate_blocked: true + id: google-cloud-maintenance-api + - generate_blocked: true + id: google-cloud-managed-identities + - generate_blocked: true + id: google-cloud-managedkafka + - generate_blocked: true + id: google-cloud-managedkafka-schemaregistry + - generate_blocked: true + id: google-cloud-media-translation + - generate_blocked: true + id: google-cloud-memcache + - generate_blocked: true + id: google-cloud-memorystore + - generate_blocked: true + id: google-cloud-migrationcenter + - generate_blocked: true + id: google-cloud-modelarmor + - generate_blocked: true + id: google-cloud-monitoring + - generate_blocked: true + id: google-cloud-monitoring-dashboards + - generate_blocked: true + id: google-cloud-monitoring-metrics-scopes + - generate_blocked: true + id: google-cloud-ndb + - generate_blocked: true + id: google-cloud-netapp + - generate_blocked: true + id: google-cloud-network-connectivity + - generate_blocked: 
true + id: google-cloud-network-management + - generate_blocked: true + id: google-cloud-network-security + - generate_blocked: true + id: google-cloud-network-services + - generate_blocked: true + id: google-cloud-notebooks + - generate_blocked: true + id: google-cloud-optimization + - generate_blocked: true + id: google-cloud-oracledatabase + - generate_blocked: true + id: google-cloud-orchestration-airflow + - generate_blocked: true + id: google-cloud-org-policy + - generate_blocked: true + id: google-cloud-os-config + - generate_blocked: true + id: google-cloud-os-login + - generate_blocked: true + id: google-cloud-parallelstore + - generate_blocked: true + id: google-cloud-parametermanager + - generate_blocked: true + id: google-cloud-phishing-protection + - generate_blocked: true + id: google-cloud-policy-troubleshooter + - generate_blocked: true + id: google-cloud-policysimulator + - generate_blocked: true + id: google-cloud-policytroubleshooter-iam + - generate_blocked: true + id: google-cloud-private-ca + - generate_blocked: true + id: google-cloud-private-catalog + - generate_blocked: true + id: google-cloud-privilegedaccessmanager + - generate_blocked: true + id: google-cloud-pubsub + - generate_blocked: true + id: google-cloud-quotas + - generate_blocked: true + id: google-cloud-rapidmigrationassessment + - generate_blocked: true + id: google-cloud-recaptcha-enterprise + - generate_blocked: true + id: google-cloud-recommendations-ai + - generate_blocked: true + id: google-cloud-recommender + - generate_blocked: true + id: google-cloud-redis + - generate_blocked: true + id: google-cloud-redis-cluster + - generate_blocked: true + id: google-cloud-resource-manager + - generate_blocked: true + id: google-cloud-retail + - generate_blocked: true + id: google-cloud-run + - generate_blocked: true + id: google-cloud-runtimeconfig + - generate_blocked: true + id: google-cloud-saasplatform-saasservicemgmt + - generate_blocked: true + id: google-cloud-scheduler + - 
generate_blocked: true + id: google-cloud-secret-manager + - generate_blocked: true + id: google-cloud-securesourcemanager + - generate_blocked: true + id: google-cloud-security-publicca + - generate_blocked: true + id: google-cloud-securitycenter + - generate_blocked: true + id: google-cloud-securitycentermanagement + - generate_blocked: true + id: google-cloud-service-control + - generate_blocked: true + id: google-cloud-service-directory + - generate_blocked: true + id: google-cloud-service-management + - generate_blocked: true + id: google-cloud-service-usage + - generate_blocked: true + id: google-cloud-servicehealth + - generate_blocked: true + id: google-cloud-shell + - generate_blocked: true + id: google-cloud-source-context + - generate_blocked: true + id: google-cloud-spanner + - generate_blocked: true + id: google-cloud-speech + - generate_blocked: true + id: google-cloud-storage-control + - generate_blocked: true + id: google-cloud-storage-transfer + - generate_blocked: true + id: google-cloud-storagebatchoperations + - generate_blocked: true + id: google-cloud-storageinsights + - generate_blocked: true + id: google-cloud-support + - generate_blocked: true + id: google-cloud-talent + - generate_blocked: true + id: google-cloud-tasks + - generate_blocked: true + id: google-cloud-telcoautomation + - generate_blocked: true + id: google-cloud-testutils + - generate_blocked: true + id: google-cloud-texttospeech + - generate_blocked: true + id: google-cloud-tpu + - generate_blocked: true + id: google-cloud-trace + - generate_blocked: true + id: google-cloud-translate + - generate_blocked: true + id: google-cloud-vectorsearch + - generate_blocked: true + id: google-cloud-video-live-stream + - generate_blocked: true + id: google-cloud-video-stitcher + - generate_blocked: true + id: google-cloud-video-transcoder + - generate_blocked: true + id: google-cloud-videointelligence + - generate_blocked: true + id: google-cloud-vision + - generate_blocked: true + id: 
google-cloud-visionai + - generate_blocked: true + id: google-cloud-vm-migration + - generate_blocked: true + id: google-cloud-vmwareengine + - generate_blocked: true + id: google-cloud-vpc-access + - generate_blocked: true + id: google-cloud-webrisk + - generate_blocked: true + id: google-cloud-websecurityscanner + - generate_blocked: true + id: google-cloud-workflows + - generate_blocked: true + id: google-cloud-workloadmanager + - generate_blocked: true + id: google-cloud-workstations + - generate_blocked: true + id: google-geo-type + - generate_blocked: true + id: google-maps-addressvalidation + - generate_blocked: true + id: google-maps-areainsights + - generate_blocked: true + id: google-maps-fleetengine + - generate_blocked: true + id: google-maps-fleetengine-delivery + - generate_blocked: true + id: google-maps-geocode + - generate_blocked: true + id: google-maps-mapsplatformdatasets + - generate_blocked: true + id: google-maps-navconnect + - generate_blocked: true + id: google-maps-places + - generate_blocked: true + id: google-maps-routeoptimization + - generate_blocked: true + id: google-maps-routing + - generate_blocked: true + id: google-maps-solar + - generate_blocked: true + id: google-resumable-media + - generate_blocked: true + id: google-shopping-css + - generate_blocked: true + id: google-shopping-merchant-accounts + - generate_blocked: true + id: google-shopping-merchant-conversions + - generate_blocked: true + id: google-shopping-merchant-datasources + - generate_blocked: true + id: google-shopping-merchant-inventories + - generate_blocked: true + id: google-shopping-merchant-issueresolution + - generate_blocked: true + id: google-shopping-merchant-lfp + - generate_blocked: true + id: google-shopping-merchant-notifications + - generate_blocked: true + id: google-shopping-merchant-ordertracking + - generate_blocked: true + id: google-shopping-merchant-products + - generate_blocked: true + id: google-shopping-merchant-productstudio + - 
generate_blocked: true + id: google-shopping-merchant-promotions + - generate_blocked: true + id: google-shopping-merchant-quota + - generate_blocked: true + id: google-shopping-merchant-reports + - generate_blocked: true + id: google-shopping-merchant-reviews + - generate_blocked: true + id: google-shopping-type + - generate_blocked: true + id: googleapis-common-protos + - generate_blocked: true + id: grafeas + - generate_blocked: true + id: grpc-google-iam-v1 + - generate_blocked: true + id: pandas-gbq + - generate_blocked: true + id: proto-plus + - generate_blocked: true + id: sqlalchemy-bigquery + - generate_blocked: true + id: sqlalchemy-spanner diff --git a/.librarian/generator-input/client-post-processing/avoid-double-copyright.yaml b/.librarian/generator-input/client-post-processing/avoid-double-copyright.yaml new file mode 100644 index 000000000000..62ebe2843c17 --- /dev/null +++ b/.librarian/generator-input/client-post-processing/avoid-double-copyright.yaml @@ -0,0 +1,62 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+description: Mitigate synthtool adding a second copyright notice when one already exists +url: https://github.com/googleapis/librarian/issues/4322 +replacements: + - paths: [ + packages/googleapis-common-protos/google/gapic/metadata/gapic_metadata_pb2.pyi, + packages/googleapis-common-protos/google/longrunning/operations_proto_pb2.pyi + ] + before: | + # Copyright 2025 Google LLC + # + # Licensed under the Apache License, Version 2.0 \(the "License"\); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License.\n + # Copyright 2025 Google LLC + # + # Licensed under the Apache License, Version 2.0 \(the "License"\); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + after: | + # Copyright 2025 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + count: 2 diff --git a/.librarian/generator-input/client-post-processing/integrate-isolated-handwritten-code.yaml b/.librarian/generator-input/client-post-processing/integrate-isolated-handwritten-code.yaml index 347fce15df05..9c64e07b52cc 100644 --- a/.librarian/generator-input/client-post-processing/integrate-isolated-handwritten-code.yaml +++ b/.librarian/generator-input/client-post-processing/integrate-isolated-handwritten-code.yaml @@ -97,6 +97,14 @@ replacements: translate_v3/services_ translate_v3/types_ + API Reference + ------------- + .. toctree:: + :maxdepth: 2 + + translate_v2/services_ + translate_v2/types_ + API Reference ------------- .. toctree:: diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 55dc06277b67..303558e48db6 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -1,3 +1,16 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:234b9d1f2ddb057ed7ac6a38db0bf8163d839c65c6cf88ade52530cddebce59e libraries: - id: bigframes @@ -8,6 +21,9 @@ libraries: - packages/bigframes preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/bigframes/.repo-metadata.json + - packages/bigframes/docs/README.rst tag_format: '{id}-v{version}' - id: bigquery-magics version: 0.12.2 @@ -17,6 +33,9 @@ libraries: - packages/bigquery-magics preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/bigquery-magics/.repo-metadata.json + - packages/bigquery-magics/docs/README.rst tag_format: '{id}-v{version}' - id: db-dtypes version: 1.5.1 @@ -26,6 +45,9 @@ libraries: - packages/db-dtypes preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/db-dtypes/.repo-metadata.json + - packages/db-dtypes/docs/README.rst tag_format: '{id}-v{version}' - id: django-google-spanner version: 4.0.3 @@ -35,6 +57,9 @@ libraries: - packages/django-google-spanner preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/django-google-spanner/.repo-metadata.json + - packages/django-google-spanner/docs/README.rst tag_format: '{id}-v{version}' - id: gapic-generator version: 1.30.14 @@ -44,6 +69,9 @@ libraries: - packages/gapic-generator preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/gapic-generator/.repo-metadata.json + - packages/gapic-generator/docs/README.rst tag_format: '{id}-v{version}' - id: gcp-sphinx-docfx-yaml version: 3.2.5 @@ -53,6 +81,9 @@ libraries: - packages/gcp-sphinx-docfx-yaml preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/gcp-sphinx-docfx-yaml/.repo-metadata.json + - packages/gcp-sphinx-docfx-yaml/docs/README.rst tag_format: '{id}-v{version}' - id: google-ads-admanager version: 0.9.0 @@ -67,6 +98,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-ads-admanager/ + release_exclude_paths: 
+ - packages/google-ads-admanager/.repo-metadata.json + - packages/google-ads-admanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-ads-datamanager version: 0.8.0 @@ -81,6 +115,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-ads-datamanager + release_exclude_paths: + - packages/google-ads-datamanager/.repo-metadata.json + - packages/google-ads-datamanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-ads-marketingplatform-admin version: 0.5.0 @@ -95,6 +132,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-ads-marketingplatform-admin/ + release_exclude_paths: + - packages/google-ads-marketingplatform-admin/.repo-metadata.json + - packages/google-ads-marketingplatform-admin/docs/README.rst tag_format: '{id}-v{version}' - id: google-ai-generativelanguage version: 0.11.0 @@ -117,6 +157,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-ai-generativelanguage/ + release_exclude_paths: + - packages/google-ai-generativelanguage/.repo-metadata.json + - packages/google-ai-generativelanguage/docs/README.rst tag_format: '{id}-v{version}' - id: google-analytics-admin version: 0.28.0 @@ -133,6 +176,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-analytics-admin/ + release_exclude_paths: + - packages/google-analytics-admin/.repo-metadata.json + - packages/google-analytics-admin/docs/README.rst tag_format: '{id}-v{version}' - id: google-analytics-data version: 0.21.0 @@ -149,6 +195,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-analytics-data/ + release_exclude_paths: + - packages/google-analytics-data/.repo-metadata.json + - packages/google-analytics-data/docs/README.rst tag_format: '{id}-v{version}' - id: google-api-core version: 2.30.2 @@ -158,13 +207,15 @@ libraries: - packages/google-api-core preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-api-core/.repo-metadata.json + - packages/google-api-core/docs/README.rst 
tag_format: '{id}-v{version}' - id: google-apps-card version: 0.6.0 last_generated_commit: 7a5706618f42f482acf583febcc7b977b66c25b2 apis: - path: google/apps/card/v1 - service_config: "" source_roots: - packages/google-apps-card preserve_regex: @@ -173,10 +224,13 @@ libraries: - tests/unit/gapic/card_v1/test_card.py remove_regex: - packages/google-apps-card/ + release_exclude_paths: + - packages/google-apps-card/.repo-metadata.json + - packages/google-apps-card/docs/README.rst tag_format: '{id}-v{version}' - id: google-apps-chat version: 0.7.0 - last_generated_commit: 7a5706618f42f482acf583febcc7b977b66c25b2 + last_generated_commit: cd090841ab172574e740c214c99df00aef9c0dee apis: - path: google/chat/v1 service_config: chat_v1.yaml @@ -187,6 +241,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-apps-chat/ + release_exclude_paths: + - packages/google-apps-chat/.repo-metadata.json + - packages/google-apps-chat/docs/README.rst tag_format: '{id}-v{version}' - id: google-apps-events-subscriptions version: 0.5.0 @@ -203,6 +260,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-apps-events-subscriptions/ + release_exclude_paths: + - packages/google-apps-events-subscriptions/.repo-metadata.json + - packages/google-apps-events-subscriptions/docs/README.rst tag_format: '{id}-v{version}' - id: google-apps-meet version: 0.4.0 @@ -219,25 +279,21 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-apps-meet/ + release_exclude_paths: + - packages/google-apps-meet/.repo-metadata.json + - packages/google-apps-meet/docs/README.rst tag_format: '{id}-v{version}' - id: google-apps-script-type version: 0.6.0 last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd apis: - path: google/apps/script/type - service_config: "" - path: google/apps/script/type/gmail - service_config: "" - path: google/apps/script/type/docs - service_config: "" - path: google/apps/script/type/drive - service_config: "" - path: google/apps/script/type/sheets 
- service_config: "" - path: google/apps/script/type/calendar - service_config: "" - path: google/apps/script/type/slides - service_config: "" source_roots: - packages/google-apps-script-type preserve_regex: @@ -252,6 +308,9 @@ libraries: - tests/unit/gapic/type/test_type.py remove_regex: - packages/google-apps-script-type + release_exclude_paths: + - packages/google-apps-script-type/.repo-metadata.json + - packages/google-apps-script-type/docs/README.rst tag_format: '{id}-v{version}' - id: google-area120-tables version: 0.14.0 @@ -266,6 +325,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-area120-tables/ + release_exclude_paths: + - packages/google-area120-tables/.repo-metadata.json + - packages/google-area120-tables/docs/README.rst tag_format: '{id}-v{version}' - id: google-auth version: 2.49.1 @@ -275,6 +337,9 @@ libraries: - packages/google-auth preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-auth/.repo-metadata.json + - packages/google-auth/docs/README.rst tag_format: '{id}-v{version}' - id: google-auth-httplib2 version: 0.3.1 @@ -284,6 +349,9 @@ libraries: - packages/google-auth-httplib2 preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-auth-httplib2/.repo-metadata.json + - packages/google-auth-httplib2/docs/README.rst tag_format: '{id}-v{version}' - id: google-auth-oauthlib version: 1.3.1 @@ -293,6 +361,9 @@ libraries: - packages/google-auth-oauthlib preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-auth-oauthlib/.repo-metadata.json + - packages/google-auth-oauthlib/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-access-approval version: 1.19.0 @@ -308,6 +379,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-access-approval/ + release_exclude_paths: + - packages/google-cloud-access-approval/.repo-metadata.json + - packages/google-cloud-access-approval/docs/README.rst tag_format: '{id}-v{version}' - id: 
google-cloud-access-context-manager version: 0.4.0 @@ -316,7 +390,6 @@ libraries: - path: google/identity/accesscontextmanager/v1 service_config: accesscontextmanager_v1.yaml - path: google/identity/accesscontextmanager/type - service_config: "" source_roots: - packages/google-cloud-access-context-manager preserve_regex: [] @@ -325,6 +398,9 @@ libraries: - .repo-metadata.json - README.rst - docs/summary_overview.md + release_exclude_paths: + - packages/google-cloud-access-context-manager/.repo-metadata.json + - packages/google-cloud-access-context-manager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-advisorynotifications version: 0.6.0 @@ -339,6 +415,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-advisorynotifications/ + release_exclude_paths: + - packages/google-cloud-advisorynotifications/.repo-metadata.json + - packages/google-cloud-advisorynotifications/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-alloydb version: 0.9.0 @@ -357,6 +436,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-alloydb/ + release_exclude_paths: + - packages/google-cloud-alloydb/.repo-metadata.json + - packages/google-cloud-alloydb/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-alloydb-connectors version: 0.4.0 @@ -376,6 +458,9 @@ libraries: - tests/unit/gapic/connectors_v1/test_connectors.py remove_regex: - packages/google-cloud-alloydb-connectors/ + release_exclude_paths: + - packages/google-cloud-alloydb-connectors/.repo-metadata.json + - packages/google-cloud-alloydb-connectors/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-api-gateway version: 1.15.0 @@ -390,6 +475,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-api-gateway/ + release_exclude_paths: + - packages/google-cloud-api-gateway/.repo-metadata.json + - packages/google-cloud-api-gateway/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-api-keys version: 0.8.0 @@ 
-404,6 +492,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-api-keys/ + release_exclude_paths: + - packages/google-cloud-api-keys/.repo-metadata.json + - packages/google-cloud-api-keys/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-apigee-connect version: 1.15.0 @@ -418,6 +509,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-apigee-connect/ + release_exclude_paths: + - packages/google-cloud-apigee-connect/.repo-metadata.json + - packages/google-cloud-apigee-connect/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-apigee-registry version: 0.9.0 @@ -432,6 +526,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-apigee-registry/ + release_exclude_paths: + - packages/google-cloud-apigee-registry/.repo-metadata.json + - packages/google-cloud-apigee-registry/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-apihub version: 0.6.0 @@ -446,6 +543,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-apihub/ + release_exclude_paths: + - packages/google-cloud-apihub/.repo-metadata.json + - packages/google-cloud-apihub/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-apiregistry version: 0.2.0 @@ -464,6 +564,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-apiregistry + release_exclude_paths: + - packages/google-cloud-apiregistry/.repo-metadata.json + - packages/google-cloud-apiregistry/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-appengine-admin version: 1.17.0 @@ -478,13 +581,15 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-appengine-admin/ + release_exclude_paths: + - packages/google-cloud-appengine-admin/.repo-metadata.json + - packages/google-cloud-appengine-admin/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-appengine-logging version: 1.9.0 last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd apis: - path: 
google/appengine/logging/v1 - service_config: "" source_roots: - packages/google-cloud-appengine-logging preserve_regex: @@ -493,6 +598,9 @@ libraries: - tests/unit/gapic/appengine_logging_v1/test_appengine_logging_v1.py remove_regex: - packages/google-cloud-appengine-logging/ + release_exclude_paths: + - packages/google-cloud-appengine-logging/.repo-metadata.json + - packages/google-cloud-appengine-logging/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-apphub version: 0.4.0 @@ -507,6 +615,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-apphub/ + release_exclude_paths: + - packages/google-cloud-apphub/.repo-metadata.json + - packages/google-cloud-apphub/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-artifact-registry version: 1.21.0 @@ -523,6 +634,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-artifact-registry/ + release_exclude_paths: + - packages/google-cloud-artifact-registry/.repo-metadata.json + - packages/google-cloud-artifact-registry/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-asset version: 4.3.0 @@ -543,6 +657,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-asset/ + release_exclude_paths: + - packages/google-cloud-asset/.repo-metadata.json + - packages/google-cloud-asset/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-assured-workloads version: 2.3.0 @@ -559,6 +676,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-assured-workloads/ + release_exclude_paths: + - packages/google-cloud-assured-workloads/.repo-metadata.json + - packages/google-cloud-assured-workloads/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-audit-log version: 0.5.0 @@ -574,6 +694,9 @@ libraries: - .repo-metadata.json - README.rst - docs/summary_overview.md + release_exclude_paths: + - packages/google-cloud-audit-log/.repo-metadata.json + - packages/google-cloud-audit-log/docs/README.rst 
tag_format: '{id}-v{version}' - id: google-cloud-auditmanager version: 0.2.0 @@ -592,6 +715,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-auditmanager + release_exclude_paths: + - packages/google-cloud-auditmanager/.repo-metadata.json + - packages/google-cloud-auditmanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-automl version: 2.19.0 @@ -614,6 +740,9 @@ libraries: - tests/unit/test_tables_client_v1beta1.py remove_regex: - packages/google-cloud-automl/ + release_exclude_paths: + - packages/google-cloud-automl/.repo-metadata.json + - packages/google-cloud-automl/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-backupdr version: 0.9.0 @@ -628,6 +757,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-backupdr/ + release_exclude_paths: + - packages/google-cloud-backupdr/.repo-metadata.json + - packages/google-cloud-backupdr/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bare-metal-solution version: 1.13.0 @@ -642,6 +774,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-bare-metal-solution/ + release_exclude_paths: + - packages/google-cloud-bare-metal-solution/.repo-metadata.json + - packages/google-cloud-bare-metal-solution/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-batch version: 0.21.0 @@ -658,6 +793,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-batch/ + release_exclude_paths: + - packages/google-cloud-batch/.repo-metadata.json + - packages/google-cloud-batch/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-appconnections version: 0.7.0 @@ -672,6 +810,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-beyondcorp-appconnections/ + release_exclude_paths: + - packages/google-cloud-beyondcorp-appconnections/.repo-metadata.json + - packages/google-cloud-beyondcorp-appconnections/docs/README.rst tag_format: '{id}-v{version}' - id: 
google-cloud-beyondcorp-appconnectors version: 0.7.0 @@ -686,6 +827,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-beyondcorp-appconnectors/ + release_exclude_paths: + - packages/google-cloud-beyondcorp-appconnectors/.repo-metadata.json + - packages/google-cloud-beyondcorp-appconnectors/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-appgateways version: 0.7.0 @@ -700,6 +844,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-beyondcorp-appgateways/ + release_exclude_paths: + - packages/google-cloud-beyondcorp-appgateways/.repo-metadata.json + - packages/google-cloud-beyondcorp-appgateways/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-clientconnectorservices version: 0.7.0 @@ -714,6 +861,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-beyondcorp-clientconnectorservices/ + release_exclude_paths: + - packages/google-cloud-beyondcorp-clientconnectorservices/.repo-metadata.json + - packages/google-cloud-beyondcorp-clientconnectorservices/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-clientgateways version: 0.7.0 @@ -728,6 +878,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-beyondcorp-clientgateways/ + release_exclude_paths: + - packages/google-cloud-beyondcorp-clientgateways/.repo-metadata.json + - packages/google-cloud-beyondcorp-clientgateways/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-biglake version: 0.3.0 @@ -742,6 +895,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-biglake + release_exclude_paths: + - packages/google-cloud-biglake/.repo-metadata.json + - packages/google-cloud-biglake/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-biglake-hive version: 0.1.0 @@ -760,6 +916,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-biglake-hive + release_exclude_paths: + - 
packages/google-cloud-biglake-hive/.repo-metadata.json + - packages/google-cloud-biglake-hive/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery version: 3.41.0 @@ -769,6 +928,9 @@ libraries: - packages/google-cloud-bigquery preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-bigquery/.repo-metadata.json + - packages/google-cloud-bigquery/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-analyticshub version: 0.8.0 @@ -783,6 +945,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-bigquery-analyticshub/ + release_exclude_paths: + - packages/google-cloud-bigquery-analyticshub/.repo-metadata.json + - packages/google-cloud-bigquery-analyticshub/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-biglake version: 0.7.0 @@ -799,6 +964,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-bigquery-biglake/ + release_exclude_paths: + - packages/google-cloud-bigquery-biglake/.repo-metadata.json + - packages/google-cloud-bigquery-biglake/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-connection version: 1.21.0 @@ -814,6 +982,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-bigquery-connection/ + release_exclude_paths: + - packages/google-cloud-bigquery-connection/.repo-metadata.json + - packages/google-cloud-bigquery-connection/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-data-exchange version: 0.8.0 @@ -828,6 +999,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-bigquery-data-exchange/ + release_exclude_paths: + - packages/google-cloud-bigquery-data-exchange/.repo-metadata.json + - packages/google-cloud-bigquery-data-exchange/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-datapolicies version: 0.9.0 @@ -848,6 +1022,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - 
packages/google-cloud-bigquery-datapolicies/ + release_exclude_paths: + - packages/google-cloud-bigquery-datapolicies/.repo-metadata.json + - packages/google-cloud-bigquery-datapolicies/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-datatransfer version: 3.22.0 @@ -863,13 +1040,15 @@ libraries: - tests/system remove_regex: - packages/google-cloud-bigquery-datatransfer/ + release_exclude_paths: + - packages/google-cloud-bigquery-datatransfer/.repo-metadata.json + - packages/google-cloud-bigquery-datatransfer/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-logging version: 1.9.0 last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd apis: - path: google/cloud/bigquery/logging/v1 - service_config: "" source_roots: - packages/google-cloud-bigquery-logging preserve_regex: @@ -878,6 +1057,9 @@ libraries: - tests/unit/gapic/bigquery_logging_v1/test_bigquery_logging_v1.py remove_regex: - packages/google-cloud-bigquery-logging/ + release_exclude_paths: + - packages/google-cloud-bigquery-logging/.repo-metadata.json + - packages/google-cloud-bigquery-logging/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-migration version: 0.14.0 @@ -894,6 +1076,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-bigquery-migration/ + release_exclude_paths: + - packages/google-cloud-bigquery-migration/.repo-metadata.json + - packages/google-cloud-bigquery-migration/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-reservation version: 1.23.0 @@ -909,6 +1094,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-bigquery-reservation/ + release_exclude_paths: + - packages/google-cloud-bigquery-reservation/.repo-metadata.json + - packages/google-cloud-bigquery-reservation/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigquery-storage version: 2.37.0 @@ -951,6 +1139,9 @@ libraries: - tests/unit/test_.*.py remove_regex: - 
packages/google-cloud-bigquery-storage + release_exclude_paths: + - packages/google-cloud-bigquery-storage/.repo-metadata.json + - packages/google-cloud-bigquery-storage/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-bigtable version: 2.36.0 @@ -960,6 +1151,9 @@ libraries: - packages/google-cloud-bigtable preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-bigtable/.repo-metadata.json + - packages/google-cloud-bigtable/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-billing version: 1.19.0 @@ -974,6 +1168,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-billing/ + release_exclude_paths: + - packages/google-cloud-billing/.repo-metadata.json + - packages/google-cloud-billing/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-billing-budgets version: 1.20.0 @@ -990,6 +1187,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-billing-budgets/ + release_exclude_paths: + - packages/google-cloud-billing-budgets/.repo-metadata.json + - packages/google-cloud-billing-budgets/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-binary-authorization version: 1.16.0 @@ -1006,6 +1206,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-binary-authorization/ + release_exclude_paths: + - packages/google-cloud-binary-authorization/.repo-metadata.json + - packages/google-cloud-binary-authorization/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-build version: 3.36.0 @@ -1022,6 +1225,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-build/ + release_exclude_paths: + - packages/google-cloud-build/.repo-metadata.json + - packages/google-cloud-build/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-capacityplanner version: 0.4.0 @@ -1036,6 +1242,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-capacityplanner/ + release_exclude_paths: + - 
packages/google-cloud-capacityplanner/.repo-metadata.json + - packages/google-cloud-capacityplanner/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-certificate-manager version: 1.13.0 @@ -1050,6 +1259,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-certificate-manager/ + release_exclude_paths: + - packages/google-cloud-certificate-manager/.repo-metadata.json + - packages/google-cloud-certificate-manager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-ces version: 0.4.0 @@ -1070,6 +1282,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-ces + release_exclude_paths: + - packages/google-cloud-ces/.repo-metadata.json + - packages/google-cloud-ces/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-channel version: 1.27.0 @@ -1084,6 +1299,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-channel/ + release_exclude_paths: + - packages/google-cloud-channel/.repo-metadata.json + - packages/google-cloud-channel/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-chronicle version: 0.4.0 @@ -1098,6 +1316,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-chronicle/ + release_exclude_paths: + - packages/google-cloud-chronicle/.repo-metadata.json + - packages/google-cloud-chronicle/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-cloudcontrolspartner version: 0.5.0 @@ -1114,6 +1335,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-cloudcontrolspartner/ + release_exclude_paths: + - packages/google-cloud-cloudcontrolspartner/.repo-metadata.json + - packages/google-cloud-cloudcontrolspartner/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-cloudsecuritycompliance version: 0.6.0 @@ -1128,6 +1352,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-cloudsecuritycompliance/ + release_exclude_paths: + - 
packages/google-cloud-cloudsecuritycompliance/.repo-metadata.json + - packages/google-cloud-cloudsecuritycompliance/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-commerce-consumer-procurement version: 0.5.0 @@ -1144,6 +1371,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-commerce-consumer-procurement/ + release_exclude_paths: + - packages/google-cloud-commerce-consumer-procurement/.repo-metadata.json + - packages/google-cloud-commerce-consumer-procurement/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-common version: 1.9.0 @@ -1159,6 +1389,9 @@ libraries: - tests/unit/gapic/common/test_common.py remove_regex: - packages/google-cloud-common/ + release_exclude_paths: + - packages/google-cloud-common/.repo-metadata.json + - packages/google-cloud-common/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-compute version: 1.47.0 @@ -1174,6 +1407,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-compute/ + release_exclude_paths: + - packages/google-cloud-compute/.repo-metadata.json + - packages/google-cloud-compute/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-compute-v1beta version: 0.10.0 @@ -1188,6 +1424,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-compute-v1beta/ + release_exclude_paths: + - packages/google-cloud-compute-v1beta/.repo-metadata.json + - packages/google-cloud-compute-v1beta/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-confidentialcomputing version: 0.9.0 @@ -1202,6 +1441,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-confidentialcomputing/ + release_exclude_paths: + - packages/google-cloud-confidentialcomputing/.repo-metadata.json + - packages/google-cloud-confidentialcomputing/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-config version: 0.5.0 @@ -1216,6 +1458,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-config/ + 
release_exclude_paths: + - packages/google-cloud-config/.repo-metadata.json + - packages/google-cloud-config/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-configdelivery version: 0.4.0 @@ -1234,6 +1479,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-configdelivery/ + release_exclude_paths: + - packages/google-cloud-configdelivery/.repo-metadata.json + - packages/google-cloud-configdelivery/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-contact-center-insights version: 1.26.0 @@ -1248,6 +1496,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-contact-center-insights/ + release_exclude_paths: + - packages/google-cloud-contact-center-insights/.repo-metadata.json + - packages/google-cloud-contact-center-insights/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-container version: 2.64.0 @@ -1265,6 +1516,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-container/ + release_exclude_paths: + - packages/google-cloud-container/.repo-metadata.json + - packages/google-cloud-container/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-containeranalysis version: 2.21.0 @@ -1280,6 +1534,9 @@ libraries: - tests/unit/test_get_grafeas_client.py remove_regex: - packages/google-cloud-containeranalysis/ + release_exclude_paths: + - packages/google-cloud-containeranalysis/.repo-metadata.json + - packages/google-cloud-containeranalysis/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-contentwarehouse version: 0.10.0 @@ -1294,6 +1551,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-contentwarehouse/ + release_exclude_paths: + - packages/google-cloud-contentwarehouse/.repo-metadata.json + - packages/google-cloud-contentwarehouse/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-core version: 2.5.1 @@ -1303,6 +1563,9 @@ libraries: - packages/google-cloud-core preserve_regex: [] remove_regex: [] + 
release_exclude_paths: + - packages/google-cloud-core/.repo-metadata.json + - packages/google-cloud-core/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-data-fusion version: 1.16.0 @@ -1317,6 +1580,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-data-fusion/ + release_exclude_paths: + - packages/google-cloud-data-fusion/.repo-metadata.json + - packages/google-cloud-data-fusion/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-data-qna version: 0.13.0 @@ -1331,6 +1597,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-data-qna/ + release_exclude_paths: + - packages/google-cloud-data-qna/.repo-metadata.json + - packages/google-cloud-data-qna/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-databasecenter version: 0.7.0 @@ -1346,6 +1615,9 @@ libraries: - scripts/client-post-processing remove_regex: - packages/google-cloud-databasecenter + release_exclude_paths: + - packages/google-cloud-databasecenter/.repo-metadata.json + - packages/google-cloud-databasecenter/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datacatalog version: 3.30.0 @@ -1362,6 +1634,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-datacatalog/ + release_exclude_paths: + - packages/google-cloud-datacatalog/.repo-metadata.json + - packages/google-cloud-datacatalog/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datacatalog-lineage version: 0.6.0 @@ -1376,6 +1651,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-datacatalog-lineage/ + release_exclude_paths: + - packages/google-cloud-datacatalog-lineage/.repo-metadata.json + - packages/google-cloud-datacatalog-lineage/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datacatalog-lineage-configmanagement version: 0.1.0 @@ -1394,6 +1672,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-datacatalog-lineage-configmanagement + 
release_exclude_paths: + - packages/google-cloud-datacatalog-lineage-configmanagement/.repo-metadata.json + - packages/google-cloud-datacatalog-lineage-configmanagement/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dataflow-client version: 0.13.0 @@ -1408,6 +1689,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dataflow-client/ + release_exclude_paths: + - packages/google-cloud-dataflow-client/.repo-metadata.json + - packages/google-cloud-dataflow-client/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dataform version: 0.10.0 @@ -1424,6 +1708,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dataform/ + release_exclude_paths: + - packages/google-cloud-dataform/.repo-metadata.json + - packages/google-cloud-dataform/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datalabeling version: 1.16.0 @@ -1438,6 +1725,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-datalabeling/ + release_exclude_paths: + - packages/google-cloud-datalabeling/.repo-metadata.json + - packages/google-cloud-datalabeling/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dataplex version: 2.18.0 @@ -1452,6 +1742,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dataplex/ + release_exclude_paths: + - packages/google-cloud-dataplex/.repo-metadata.json + - packages/google-cloud-dataplex/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dataproc version: 5.26.0 @@ -1467,6 +1760,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-dataproc/ + release_exclude_paths: + - packages/google-cloud-dataproc/.repo-metadata.json + - packages/google-cloud-dataproc/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dataproc-metastore version: 1.22.0 @@ -1485,6 +1781,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dataproc-metastore/ + release_exclude_paths: + - 
packages/google-cloud-dataproc-metastore/.repo-metadata.json + - packages/google-cloud-dataproc-metastore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datastore version: 2.24.0 @@ -1546,6 +1845,9 @@ libraries: - ^packages/google-cloud-datastore/tests/unit/__init__.py - ^packages/google-cloud-datastore/tests/unit/gapic - ^packages/google-cloud-datastore/samples/generated_samples + release_exclude_paths: + - packages/google-cloud-datastore/.repo-metadata.json + - packages/google-cloud-datastore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-datastream version: 1.18.0 @@ -1562,6 +1864,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-datastream/ + release_exclude_paths: + - packages/google-cloud-datastream/.repo-metadata.json + - packages/google-cloud-datastream/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-deploy version: 2.10.0 @@ -1576,6 +1881,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-deploy/ + release_exclude_paths: + - packages/google-cloud-deploy/.repo-metadata.json + - packages/google-cloud-deploy/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-developerconnect version: 0.5.0 @@ -1590,6 +1898,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-developerconnect/ + release_exclude_paths: + - packages/google-cloud-developerconnect/.repo-metadata.json + - packages/google-cloud-developerconnect/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-devicestreaming version: 0.4.0 @@ -1604,6 +1915,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-devicestreaming/ + release_exclude_paths: + - packages/google-cloud-devicestreaming/.repo-metadata.json + - packages/google-cloud-devicestreaming/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dialogflow version: 2.47.0 @@ -1620,6 +1934,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dialogflow/ + 
release_exclude_paths: + - packages/google-cloud-dialogflow/.repo-metadata.json + - packages/google-cloud-dialogflow/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dialogflow-cx version: 2.5.0 @@ -1636,6 +1953,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dialogflow-cx + release_exclude_paths: + - packages/google-cloud-dialogflow-cx/.repo-metadata.json + - packages/google-cloud-dialogflow-cx/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-discoveryengine version: 0.18.0 @@ -1654,10 +1974,13 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-discoveryengine/ + release_exclude_paths: + - packages/google-cloud-discoveryengine/.repo-metadata.json + - packages/google-cloud-discoveryengine/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dlp version: 3.35.0 - last_generated_commit: 59d5f2b46924714af627ac29ea6de78641a00835 + last_generated_commit: cd090841ab172574e740c214c99df00aef9c0dee apis: - path: google/privacy/dlp/v2 service_config: dlp_v2.yaml @@ -1669,6 +1992,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-dlp/ + release_exclude_paths: + - packages/google-cloud-dlp/.repo-metadata.json + - packages/google-cloud-dlp/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dms version: 1.15.0 @@ -1683,6 +2009,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-dms/ + release_exclude_paths: + - packages/google-cloud-dms/.repo-metadata.json + - packages/google-cloud-dms/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-dns version: 0.36.1 @@ -1692,6 +2021,9 @@ libraries: - packages/google-cloud-dns preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-dns/.repo-metadata.json + - packages/google-cloud-dns/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-documentai version: 3.14.0 @@ -1708,6 +2040,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - 
packages/google-cloud-documentai/ + release_exclude_paths: + - packages/google-cloud-documentai/.repo-metadata.json + - packages/google-cloud-documentai/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-documentai-toolbox version: 0.15.2 @@ -1717,6 +2052,9 @@ libraries: - packages/google-cloud-documentai-toolbox preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-documentai-toolbox/.repo-metadata.json + - packages/google-cloud-documentai-toolbox/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-domains version: 1.13.0 @@ -1733,6 +2071,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-domains/ + release_exclude_paths: + - packages/google-cloud-domains/.repo-metadata.json + - packages/google-cloud-domains/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-edgecontainer version: 0.8.0 @@ -1747,6 +2088,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-edgecontainer/ + release_exclude_paths: + - packages/google-cloud-edgecontainer/.repo-metadata.json + - packages/google-cloud-edgecontainer/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-edgenetwork version: 0.5.0 @@ -1761,6 +2105,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-edgenetwork/ + release_exclude_paths: + - packages/google-cloud-edgenetwork/.repo-metadata.json + - packages/google-cloud-edgenetwork/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-enterpriseknowledgegraph version: 0.6.0 @@ -1775,6 +2122,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-enterpriseknowledgegraph/ + release_exclude_paths: + - packages/google-cloud-enterpriseknowledgegraph/.repo-metadata.json + - packages/google-cloud-enterpriseknowledgegraph/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-error-reporting version: 1.15.0 @@ -1812,6 +2162,9 @@ libraries: - 
^packages/google-cloud-error-reporting/samples/generated_samples - ^packages/google-cloud-error-reporting/setup.py - ^packages/google-cloud-error-reporting/testing + release_exclude_paths: + - packages/google-cloud-error-reporting/.repo-metadata.json + - packages/google-cloud-error-reporting/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-essential-contacts version: 1.13.0 @@ -1826,6 +2179,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-essential-contacts/ + release_exclude_paths: + - packages/google-cloud-essential-contacts/.repo-metadata.json + - packages/google-cloud-essential-contacts/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-eventarc version: 1.20.0 @@ -1840,6 +2196,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-eventarc/ + release_exclude_paths: + - packages/google-cloud-eventarc/.repo-metadata.json + - packages/google-cloud-eventarc/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-eventarc-publishing version: 0.10.0 @@ -1854,6 +2213,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-eventarc-publishing/ + release_exclude_paths: + - packages/google-cloud-eventarc-publishing/.repo-metadata.json + - packages/google-cloud-eventarc-publishing/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-filestore version: 1.16.0 @@ -1868,6 +2230,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-filestore/ + release_exclude_paths: + - packages/google-cloud-filestore/.repo-metadata.json + - packages/google-cloud-filestore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-financialservices version: 0.4.0 @@ -1882,6 +2247,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-financialservices/ + release_exclude_paths: + - packages/google-cloud-financialservices/.repo-metadata.json + - packages/google-cloud-financialservices/docs/README.rst tag_format: '{id}-v{version}' - id: 
google-cloud-firestore version: 2.26.0 @@ -1890,7 +2258,6 @@ libraries: - path: google/firestore/admin/v1 service_config: firestore_v1.yaml - path: google/firestore/bundle - service_config: "" - path: google/firestore/v1 service_config: firestore_v1.yaml source_roots: @@ -1944,6 +2311,9 @@ libraries: - ^packages/google-cloud-firestore/docs/index.rst - ^packages/google-cloud-firestore/docs/README.rst - ^packages/google-cloud-firestore/docs/summary_overview.md + release_exclude_paths: + - packages/google-cloud-firestore/.repo-metadata.json + - packages/google-cloud-firestore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-functions version: 1.23.0 @@ -1960,6 +2330,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-functions/ + release_exclude_paths: + - packages/google-cloud-functions/.repo-metadata.json + - packages/google-cloud-functions/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gdchardwaremanagement version: 0.5.0 @@ -1974,6 +2347,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gdchardwaremanagement/ + release_exclude_paths: + - packages/google-cloud-gdchardwaremanagement/.repo-metadata.json + - packages/google-cloud-gdchardwaremanagement/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-geminidataanalytics version: 0.12.0 @@ -1990,6 +2366,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-geminidataanalytics/ + release_exclude_paths: + - packages/google-cloud-geminidataanalytics/.repo-metadata.json + - packages/google-cloud-geminidataanalytics/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gke-backup version: 0.8.0 @@ -2004,6 +2383,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gke-backup/ + release_exclude_paths: + - packages/google-cloud-gke-backup/.repo-metadata.json + - packages/google-cloud-gke-backup/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gke-connect-gateway 
version: 0.13.0 @@ -2020,6 +2402,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gke-connect-gateway/ + release_exclude_paths: + - packages/google-cloud-gke-connect-gateway/.repo-metadata.json + - packages/google-cloud-gke-connect-gateway/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gke-hub version: 1.23.0 @@ -2042,6 +2427,9 @@ libraries: - google/cloud/gkehub_v1/rbacrolebindingactuation_v1 remove_regex: - packages/google-cloud-gke-hub + release_exclude_paths: + - packages/google-cloud-gke-hub/.repo-metadata.json + - packages/google-cloud-gke-hub/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gke-multicloud version: 0.9.0 @@ -2056,6 +2444,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gke-multicloud + release_exclude_paths: + - packages/google-cloud-gke-multicloud/.repo-metadata.json + - packages/google-cloud-gke-multicloud/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gkerecommender version: 0.3.0 @@ -2070,6 +2461,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gkerecommender + release_exclude_paths: + - packages/google-cloud-gkerecommender/.repo-metadata.json + - packages/google-cloud-gkerecommender/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-gsuiteaddons version: 0.5.0 @@ -2084,6 +2478,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-gsuiteaddons + release_exclude_paths: + - packages/google-cloud-gsuiteaddons/.repo-metadata.json + - packages/google-cloud-gsuiteaddons/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-hypercomputecluster version: 0.4.0 @@ -2100,6 +2497,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-hypercomputecluster + release_exclude_paths: + - packages/google-cloud-hypercomputecluster/.repo-metadata.json + - packages/google-cloud-hypercomputecluster/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-iam 
version: 2.22.0 @@ -2124,13 +2524,15 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-iam + release_exclude_paths: + - packages/google-cloud-iam/.repo-metadata.json + - packages/google-cloud-iam/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-iam-logging version: 1.7.0 last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd apis: - path: google/iam/v1/logging - service_config: "" source_roots: - packages/google-cloud-iam-logging preserve_regex: @@ -2139,6 +2541,9 @@ libraries: - tests/unit/gapic/iam_logging_v1/test_iam_logging.py remove_regex: - packages/google-cloud-iam-logging/ + release_exclude_paths: + - packages/google-cloud-iam-logging/.repo-metadata.json + - packages/google-cloud-iam-logging/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-iap version: 1.21.0 @@ -2153,6 +2558,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-iap/ + release_exclude_paths: + - packages/google-cloud-iap/.repo-metadata.json + - packages/google-cloud-iap/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-ids version: 1.13.0 @@ -2167,6 +2575,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-ids/ + release_exclude_paths: + - packages/google-cloud-ids/.repo-metadata.json + - packages/google-cloud-ids/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-kms version: 3.12.0 @@ -2182,6 +2593,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-kms/ + release_exclude_paths: + - packages/google-cloud-kms/.repo-metadata.json + - packages/google-cloud-kms/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-kms-inventory version: 0.6.0 @@ -2196,6 +2610,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-kms-inventory/ + release_exclude_paths: + - packages/google-cloud-kms-inventory/.repo-metadata.json + - packages/google-cloud-kms-inventory/docs/README.rst tag_format: '{id}-v{version}' - id: 
google-cloud-language version: 2.20.0 @@ -2215,6 +2632,9 @@ libraries: - samples/README.txt remove_regex: - packages/google-cloud-language/ + release_exclude_paths: + - packages/google-cloud-language/.repo-metadata.json + - packages/google-cloud-language/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-licensemanager version: 0.4.0 @@ -2229,6 +2649,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-licensemanager/ + release_exclude_paths: + - packages/google-cloud-licensemanager/.repo-metadata.json + - packages/google-cloud-licensemanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-life-sciences version: 0.12.0 @@ -2243,6 +2666,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-life-sciences/ + release_exclude_paths: + - packages/google-cloud-life-sciences/.repo-metadata.json + - packages/google-cloud-life-sciences/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-locationfinder version: 0.4.0 @@ -2257,6 +2683,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-locationfinder/ + release_exclude_paths: + - packages/google-cloud-locationfinder/.repo-metadata.json + - packages/google-cloud-locationfinder/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-logging version: 3.15.0 @@ -2301,6 +2730,9 @@ libraries: - ^packages/google-cloud-logging/tests/__init__.py - ^packages/google-cloud-logging/tests/unit/__init__.py - ^packages/google-cloud-logging/tests/unit/gapic + release_exclude_paths: + - packages/google-cloud-logging/.repo-metadata.json + - packages/google-cloud-logging/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-lustre version: 0.4.0 @@ -2315,6 +2747,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-lustre/ + release_exclude_paths: + - packages/google-cloud-lustre/.repo-metadata.json + - packages/google-cloud-lustre/docs/README.rst tag_format: '{id}-v{version}' - id: 
google-cloud-maintenance-api version: 0.4.0 @@ -2331,6 +2766,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-maintenance-api/ + release_exclude_paths: + - packages/google-cloud-maintenance-api/.repo-metadata.json + - packages/google-cloud-maintenance-api/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-managed-identities version: 1.15.0 @@ -2345,6 +2783,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-managed-identities/ + release_exclude_paths: + - packages/google-cloud-managed-identities/.repo-metadata.json + - packages/google-cloud-managed-identities/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-managedkafka version: 0.4.0 @@ -2359,6 +2800,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-managedkafka/ + release_exclude_paths: + - packages/google-cloud-managedkafka/.repo-metadata.json + - packages/google-cloud-managedkafka/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-managedkafka-schemaregistry version: 0.4.0 @@ -2373,6 +2817,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-managedkafka-schemaregistry/ + release_exclude_paths: + - packages/google-cloud-managedkafka-schemaregistry/.repo-metadata.json + - packages/google-cloud-managedkafka-schemaregistry/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-media-translation version: 0.14.0 @@ -2387,6 +2834,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-media-translation/ + release_exclude_paths: + - packages/google-cloud-media-translation/.repo-metadata.json + - packages/google-cloud-media-translation/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-memcache version: 1.15.0 @@ -2403,10 +2853,13 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-memcache/ + release_exclude_paths: + - packages/google-cloud-memcache/.repo-metadata.json + - packages/google-cloud-memcache/docs/README.rst 
tag_format: '{id}-v{version}' - id: google-cloud-memorystore version: 0.4.0 - last_generated_commit: 582172de2d9b6443e1fecf696167867c6d8a5fc4 + last_generated_commit: cd090841ab172574e740c214c99df00aef9c0dee apis: - path: google/cloud/memorystore/v1beta service_config: memorystore_v1beta.yaml @@ -2419,6 +2872,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-memorystore/ + release_exclude_paths: + - packages/google-cloud-memorystore/.repo-metadata.json + - packages/google-cloud-memorystore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-migrationcenter version: 0.4.0 @@ -2433,6 +2889,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-migrationcenter/ + release_exclude_paths: + - packages/google-cloud-migrationcenter/.repo-metadata.json + - packages/google-cloud-migrationcenter/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-modelarmor version: 0.5.0 @@ -2449,6 +2908,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-modelarmor/ + release_exclude_paths: + - packages/google-cloud-modelarmor/.repo-metadata.json + - packages/google-cloud-modelarmor/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-monitoring version: 2.30.0 @@ -2469,6 +2931,9 @@ libraries: - tests/unit/test_query.py remove_regex: - packages/google-cloud-monitoring + release_exclude_paths: + - packages/google-cloud-monitoring/.repo-metadata.json + - packages/google-cloud-monitoring/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-monitoring-dashboards version: 2.21.0 @@ -2485,6 +2950,9 @@ libraries: - tests/unit/gapic/dashboard_v1 remove_regex: - packages/google-cloud-monitoring-dashboards + release_exclude_paths: + - packages/google-cloud-monitoring-dashboards/.repo-metadata.json + - packages/google-cloud-monitoring-dashboards/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-monitoring-metrics-scopes version: 1.12.0 @@ -2499,6 +2967,9 @@ libraries: - 
docs/CHANGELOG.md remove_regex: - packages/google-cloud-monitoring-metrics-scopes/ + release_exclude_paths: + - packages/google-cloud-monitoring-metrics-scopes/.repo-metadata.json + - packages/google-cloud-monitoring-metrics-scopes/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-ndb version: 2.4.2 @@ -2508,6 +2979,9 @@ libraries: - packages/google-cloud-ndb preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-ndb/.repo-metadata.json + - packages/google-cloud-ndb/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-netapp version: 0.9.0 @@ -2522,6 +2996,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-netapp/ + release_exclude_paths: + - packages/google-cloud-netapp/.repo-metadata.json + - packages/google-cloud-netapp/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-network-connectivity version: 2.15.0 @@ -2540,6 +3017,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-network-connectivity/ + release_exclude_paths: + - packages/google-cloud-network-connectivity/.repo-metadata.json + - packages/google-cloud-network-connectivity/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-network-management version: 1.34.0 @@ -2554,6 +3034,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-network-management/ + release_exclude_paths: + - packages/google-cloud-network-management/.repo-metadata.json + - packages/google-cloud-network-management/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-network-security version: 0.13.0 @@ -2572,6 +3055,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-network-security + release_exclude_paths: + - packages/google-cloud-network-security/.repo-metadata.json + - packages/google-cloud-network-security/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-network-services version: 0.9.0 @@ -2586,6 +3072,9 @@ libraries: - 
docs/CHANGELOG.md remove_regex: - packages/google-cloud-network-services/ + release_exclude_paths: + - packages/google-cloud-network-services/.repo-metadata.json + - packages/google-cloud-network-services/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-notebooks version: 1.16.0 @@ -2604,6 +3093,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-notebooks/ + release_exclude_paths: + - packages/google-cloud-notebooks/.repo-metadata.json + - packages/google-cloud-notebooks/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-optimization version: 1.14.0 @@ -2618,6 +3110,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-optimization/ + release_exclude_paths: + - packages/google-cloud-optimization/.repo-metadata.json + - packages/google-cloud-optimization/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-oracledatabase version: 0.5.0 @@ -2632,6 +3127,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-oracledatabase/ + release_exclude_paths: + - packages/google-cloud-oracledatabase/.repo-metadata.json + - packages/google-cloud-oracledatabase/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-orchestration-airflow version: 1.20.0 @@ -2648,13 +3146,15 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-orchestration-airflow/ + release_exclude_paths: + - packages/google-cloud-orchestration-airflow/.repo-metadata.json + - packages/google-cloud-orchestration-airflow/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-org-policy version: 1.17.0 last_generated_commit: 55319b058f8a0e46bbeeff30e374e4b1f081f494 apis: - path: google/cloud/orgpolicy/v1 - service_config: "" - path: google/cloud/orgpolicy/v2 service_config: orgpolicy_v2.yaml source_roots: @@ -2667,6 +3167,9 @@ libraries: - tests/unit/test_packaging.py remove_regex: - packages/google-cloud-org-policy + release_exclude_paths: + - 
packages/google-cloud-org-policy/.repo-metadata.json + - packages/google-cloud-org-policy/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-os-config version: 1.24.0 @@ -2684,6 +3187,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-os-config/ + release_exclude_paths: + - packages/google-cloud-os-config/.repo-metadata.json + - packages/google-cloud-os-config/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-os-login version: 2.20.0 @@ -2700,6 +3206,9 @@ libraries: - docs/oslogin_v1/common/types.rst remove_regex: - packages/google-cloud-os-login + release_exclude_paths: + - packages/google-cloud-os-login/.repo-metadata.json + - packages/google-cloud-os-login/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-parallelstore version: 0.6.0 @@ -2716,6 +3225,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-parallelstore/ + release_exclude_paths: + - packages/google-cloud-parallelstore/.repo-metadata.json + - packages/google-cloud-parallelstore/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-parametermanager version: 0.4.0 @@ -2730,6 +3242,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-parametermanager/ + release_exclude_paths: + - packages/google-cloud-parametermanager/.repo-metadata.json + - packages/google-cloud-parametermanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-phishing-protection version: 1.17.0 @@ -2744,6 +3259,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-phishing-protection/ + release_exclude_paths: + - packages/google-cloud-phishing-protection/.repo-metadata.json + - packages/google-cloud-phishing-protection/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-policy-troubleshooter version: 1.16.0 @@ -2758,6 +3276,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-policy-troubleshooter/ + release_exclude_paths: + - 
packages/google-cloud-policy-troubleshooter/.repo-metadata.json + - packages/google-cloud-policy-troubleshooter/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-policysimulator version: 0.4.0 @@ -2772,6 +3293,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-policysimulator/ + release_exclude_paths: + - packages/google-cloud-policysimulator/.repo-metadata.json + - packages/google-cloud-policysimulator/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-policytroubleshooter-iam version: 0.4.0 @@ -2786,6 +3310,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-policytroubleshooter-iam/ + release_exclude_paths: + - packages/google-cloud-policytroubleshooter-iam/.repo-metadata.json + - packages/google-cloud-policytroubleshooter-iam/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-private-ca version: 1.18.0 @@ -2802,6 +3329,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-private-ca/ + release_exclude_paths: + - packages/google-cloud-private-ca/.repo-metadata.json + - packages/google-cloud-private-ca/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-private-catalog version: 0.12.0 @@ -2816,6 +3346,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-private-catalog/ + release_exclude_paths: + - packages/google-cloud-private-catalog/.repo-metadata.json + - packages/google-cloud-private-catalog/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-privilegedaccessmanager version: 0.4.0 @@ -2830,10 +3363,13 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-privilegedaccessmanager/ + release_exclude_paths: + - packages/google-cloud-privilegedaccessmanager/.repo-metadata.json + - packages/google-cloud-privilegedaccessmanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-pubsub version: 2.36.0 - last_generated_commit: 256b575f6915282b20795c13414b21f2c0af65db + 
last_generated_commit: cd090841ab172574e740c214c99df00aef9c0dee apis: - path: google/pubsub/v1 service_config: pubsub_v1.yaml @@ -2866,6 +3402,9 @@ libraries: - ^packages/google-cloud-pubsub/tests/unit/gapic - ^packages/google-cloud-pubsub/samples/generated_samples - ^packages/google-cloud-pubsub/docs/pubsub_v1 + release_exclude_paths: + - packages/google-cloud-pubsub/.repo-metadata.json + - packages/google-cloud-pubsub/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-quotas version: 0.6.0 @@ -2882,6 +3421,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-quotas/ + release_exclude_paths: + - packages/google-cloud-quotas/.repo-metadata.json + - packages/google-cloud-quotas/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-rapidmigrationassessment version: 0.4.0 @@ -2896,6 +3438,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-rapidmigrationassessment/ + release_exclude_paths: + - packages/google-cloud-rapidmigrationassessment/.repo-metadata.json + - packages/google-cloud-rapidmigrationassessment/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-recaptcha-enterprise version: 1.31.0 @@ -2910,6 +3455,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-recaptcha-enterprise/ + release_exclude_paths: + - packages/google-cloud-recaptcha-enterprise/.repo-metadata.json + - packages/google-cloud-recaptcha-enterprise/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-recommendations-ai version: 0.13.0 @@ -2924,6 +3472,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-recommendations-ai/ + release_exclude_paths: + - packages/google-cloud-recommendations-ai/.repo-metadata.json + - packages/google-cloud-recommendations-ai/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-recommender version: 2.21.0 @@ -2940,6 +3491,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-recommender/ + 
release_exclude_paths: + - packages/google-cloud-recommender/.repo-metadata.json + - packages/google-cloud-recommender/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-redis version: 2.21.0 @@ -2956,6 +3510,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-redis/ + release_exclude_paths: + - packages/google-cloud-redis/.repo-metadata.json + - packages/google-cloud-redis/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-redis-cluster version: 0.4.0 @@ -2972,6 +3529,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-redis-cluster/ + release_exclude_paths: + - packages/google-cloud-redis-cluster/.repo-metadata.json + - packages/google-cloud-redis-cluster/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-resource-manager version: 1.17.0 @@ -2986,6 +3546,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-resource-manager/ + release_exclude_paths: + - packages/google-cloud-resource-manager/.repo-metadata.json + - packages/google-cloud-resource-manager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-retail version: 2.10.0 @@ -3004,6 +3567,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-retail/ + release_exclude_paths: + - packages/google-cloud-retail/.repo-metadata.json + - packages/google-cloud-retail/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-run version: 0.16.0 @@ -3018,6 +3584,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-run/ + release_exclude_paths: + - packages/google-cloud-run/.repo-metadata.json + - packages/google-cloud-run/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-runtimeconfig version: 0.36.1 @@ -3027,6 +3596,9 @@ libraries: - packages/google-cloud-runtimeconfig preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-runtimeconfig/.repo-metadata.json + - 
packages/google-cloud-runtimeconfig/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-saasplatform-saasservicemgmt version: 0.5.0 @@ -3041,6 +3613,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-saasplatform-saasservicemgmt/ + release_exclude_paths: + - packages/google-cloud-saasplatform-saasservicemgmt/.repo-metadata.json + - packages/google-cloud-saasplatform-saasservicemgmt/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-scheduler version: 2.19.0 @@ -3058,6 +3633,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-scheduler/ + release_exclude_paths: + - packages/google-cloud-scheduler/.repo-metadata.json + - packages/google-cloud-scheduler/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-secret-manager version: 2.27.0 @@ -3076,6 +3654,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-secret-manager + release_exclude_paths: + - packages/google-cloud-secret-manager/.repo-metadata.json + - packages/google-cloud-secret-manager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-securesourcemanager version: 0.5.0 @@ -3090,6 +3671,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-securesourcemanager/ + release_exclude_paths: + - packages/google-cloud-securesourcemanager/.repo-metadata.json + - packages/google-cloud-securesourcemanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-security-publicca version: 0.6.0 @@ -3106,6 +3690,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-security-publicca/ + release_exclude_paths: + - packages/google-cloud-security-publicca/.repo-metadata.json + - packages/google-cloud-security-publicca/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-securitycenter version: 1.44.0 @@ -3126,6 +3713,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-securitycenter/ + release_exclude_paths: + - 
packages/google-cloud-securitycenter/.repo-metadata.json + - packages/google-cloud-securitycenter/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-securitycentermanagement version: 0.4.0 @@ -3140,6 +3730,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-securitycentermanagement/ + release_exclude_paths: + - packages/google-cloud-securitycentermanagement/.repo-metadata.json + - packages/google-cloud-securitycentermanagement/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-service-control version: 1.19.0 @@ -3156,6 +3749,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-service-control/ + release_exclude_paths: + - packages/google-cloud-service-control/.repo-metadata.json + - packages/google-cloud-service-control/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-service-directory version: 1.17.0 @@ -3172,6 +3768,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-service-directory/ + release_exclude_paths: + - packages/google-cloud-service-directory/.repo-metadata.json + - packages/google-cloud-service-directory/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-service-management version: 1.16.0 @@ -3186,6 +3785,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-service-management/ + release_exclude_paths: + - packages/google-cloud-service-management/.repo-metadata.json + - packages/google-cloud-service-management/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-service-usage version: 1.16.0 @@ -3200,6 +3802,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-service-usage/ + release_exclude_paths: + - packages/google-cloud-service-usage/.repo-metadata.json + - packages/google-cloud-service-usage/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-servicehealth version: 0.4.0 @@ -3214,6 +3819,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - 
packages/google-cloud-servicehealth/ + release_exclude_paths: + - packages/google-cloud-servicehealth/.repo-metadata.json + - packages/google-cloud-servicehealth/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-shell version: 1.15.0 @@ -3228,13 +3836,15 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-shell/ + release_exclude_paths: + - packages/google-cloud-shell/.repo-metadata.json + - packages/google-cloud-shell/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-source-context version: 1.10.0 last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd apis: - path: google/devtools/source/v1 - service_config: "" source_roots: - packages/google-cloud-source-context preserve_regex: @@ -3243,6 +3853,9 @@ libraries: - tests/unit/gapic/source_context_v1/test_source_context_v1.py remove_regex: - packages/google-cloud-source-context/ + release_exclude_paths: + - packages/google-cloud-source-context/.repo-metadata.json + - packages/google-cloud-source-context/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-spanner version: 3.64.0 @@ -3325,6 +3938,9 @@ libraries: - ^packages/google-cloud-spanner/tests/unit/gapic/spanner_v1 - ^packages/google-cloud-spanner/tests/unit/gapic/__init__.py - ^packages/google-cloud-spanner/samples/generated_samples + release_exclude_paths: + - packages/google-cloud-spanner/.repo-metadata.json + - packages/google-cloud-spanner/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-speech version: 2.38.0 @@ -3346,10 +3962,13 @@ libraries: - tests/unit/test_helpers.py remove_regex: - packages/google-cloud-speech/ + release_exclude_paths: + - packages/google-cloud-speech/.repo-metadata.json + - packages/google-cloud-speech/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-storage version: 3.10.1 - last_generated_commit: 280bed6c219637610d09cebf696958dd99fd2f76 + last_generated_commit: cd090841ab172574e740c214c99df00aef9c0dee apis: - path: 
google/storage/v2 service_config: storage_v2.yaml @@ -3382,6 +4001,9 @@ libraries: - ^packages/google-cloud-storage/docs/_storage - ^packages/google-cloud-storage/docs/summary_overview.md - ^packages/google-cloud-storage/docs/multiprocessing.rst + release_exclude_paths: + - packages/google-cloud-storage/.repo-metadata.json + - packages/google-cloud-storage/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-storage-control version: 1.11.0 @@ -3396,6 +4018,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-storage-control/ + release_exclude_paths: + - packages/google-cloud-storage-control/.repo-metadata.json + - packages/google-cloud-storage-control/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-storage-transfer version: 1.20.0 @@ -3410,6 +4035,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-storage-transfer/ + release_exclude_paths: + - packages/google-cloud-storage-transfer/.repo-metadata.json + - packages/google-cloud-storage-transfer/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-storagebatchoperations version: 0.6.0 @@ -3424,6 +4052,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-storagebatchoperations/ + release_exclude_paths: + - packages/google-cloud-storagebatchoperations/.repo-metadata.json + - packages/google-cloud-storagebatchoperations/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-storageinsights version: 0.4.0 @@ -3438,6 +4069,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-storageinsights/ + release_exclude_paths: + - packages/google-cloud-storageinsights/.repo-metadata.json + - packages/google-cloud-storageinsights/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-support version: 0.4.0 @@ -3454,6 +4088,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-support/ + release_exclude_paths: + - packages/google-cloud-support/.repo-metadata.json + - 
packages/google-cloud-support/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-talent version: 2.20.0 @@ -3470,6 +4107,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-talent/ + release_exclude_paths: + - packages/google-cloud-talent/.repo-metadata.json + - packages/google-cloud-talent/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-tasks version: 2.22.0 @@ -3490,6 +4130,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-tasks/ + release_exclude_paths: + - packages/google-cloud-tasks/.repo-metadata.json + - packages/google-cloud-tasks/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-telcoautomation version: 0.5.0 @@ -3507,6 +4150,9 @@ libraries: - snippets/README.md remove_regex: - packages/google-cloud-telcoautomation/ + release_exclude_paths: + - packages/google-cloud-telcoautomation/.repo-metadata.json + - packages/google-cloud-telcoautomation/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-testutils version: 1.7.1 @@ -3516,6 +4162,9 @@ libraries: - packages/google-cloud-testutils preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-cloud-testutils/.repo-metadata.json + - packages/google-cloud-testutils/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-texttospeech version: 2.36.0 @@ -3533,6 +4182,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-texttospeech/ + release_exclude_paths: + - packages/google-cloud-texttospeech/.repo-metadata.json + - packages/google-cloud-texttospeech/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-tpu version: 1.26.0 @@ -3551,6 +4203,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-tpu/ + release_exclude_paths: + - packages/google-cloud-tpu/.repo-metadata.json + - packages/google-cloud-tpu/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-trace version: 1.19.0 @@ -3567,6 +4222,9 @@ libraries: - 
docs/CHANGELOG.md remove_regex: - packages/google-cloud-trace/ + release_exclude_paths: + - packages/google-cloud-trace/.repo-metadata.json + - packages/google-cloud-trace/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-translate version: 3.25.0 @@ -3588,6 +4246,9 @@ libraries: - tests/unit/v2 remove_regex: - packages/google-cloud-translate/ + release_exclude_paths: + - packages/google-cloud-translate/.repo-metadata.json + - packages/google-cloud-translate/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-vectorsearch version: 0.9.0 @@ -3604,6 +4265,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-vectorsearch + release_exclude_paths: + - packages/google-cloud-vectorsearch/.repo-metadata.json + - packages/google-cloud-vectorsearch/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-video-live-stream version: 1.16.0 @@ -3618,6 +4282,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-video-live-stream/ + release_exclude_paths: + - packages/google-cloud-video-live-stream/.repo-metadata.json + - packages/google-cloud-video-live-stream/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-video-stitcher version: 0.11.0 @@ -3632,6 +4299,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-video-stitcher/ + release_exclude_paths: + - packages/google-cloud-video-stitcher/.repo-metadata.json + - packages/google-cloud-video-stitcher/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-video-transcoder version: 1.20.0 @@ -3646,6 +4316,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-video-transcoder/ + release_exclude_paths: + - packages/google-cloud-video-transcoder/.repo-metadata.json + - packages/google-cloud-video-transcoder/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-videointelligence version: 2.19.0 @@ -3669,6 +4342,9 @@ libraries: - tests/system remove_regex: - 
packages/google-cloud-videointelligence/ + release_exclude_paths: + - packages/google-cloud-videointelligence/.repo-metadata.json + - packages/google-cloud-videointelligence/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-vision version: 3.13.0 @@ -3695,6 +4371,9 @@ libraries: - tests/unit/test_helpers.py remove_regex: - packages/google-cloud-vision/ + release_exclude_paths: + - packages/google-cloud-vision/.repo-metadata.json + - packages/google-cloud-vision/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-visionai version: 0.5.0 @@ -3711,6 +4390,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-visionai/ + release_exclude_paths: + - packages/google-cloud-visionai/.repo-metadata.json + - packages/google-cloud-visionai/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-vm-migration version: 1.16.0 @@ -3725,6 +4407,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-vm-migration/ + release_exclude_paths: + - packages/google-cloud-vm-migration/.repo-metadata.json + - packages/google-cloud-vm-migration/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-vmwareengine version: 1.11.0 @@ -3739,6 +4424,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-vmwareengine/ + release_exclude_paths: + - packages/google-cloud-vmwareengine/.repo-metadata.json + - packages/google-cloud-vmwareengine/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-vpc-access version: 1.16.0 @@ -3753,6 +4441,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-vpc-access/ + release_exclude_paths: + - packages/google-cloud-vpc-access/.repo-metadata.json + - packages/google-cloud-vpc-access/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-webrisk version: 1.21.0 @@ -3769,6 +4460,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-webrisk/ + release_exclude_paths: + - 
packages/google-cloud-webrisk/.repo-metadata.json + - packages/google-cloud-webrisk/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-websecurityscanner version: 1.20.0 @@ -3787,6 +4481,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-websecurityscanner/ + release_exclude_paths: + - packages/google-cloud-websecurityscanner/.repo-metadata.json + - packages/google-cloud-websecurityscanner/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-workflows version: 1.21.0 @@ -3807,6 +4504,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-workflows/ + release_exclude_paths: + - packages/google-cloud-workflows/.repo-metadata.json + - packages/google-cloud-workflows/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-workloadmanager version: 0.2.0 @@ -3825,6 +4525,9 @@ libraries: - tests/system remove_regex: - packages/google-cloud-workloadmanager + release_exclude_paths: + - packages/google-cloud-workloadmanager/.repo-metadata.json + - packages/google-cloud-workloadmanager/docs/README.rst tag_format: '{id}-v{version}' - id: google-cloud-workstations version: 0.8.0 @@ -3841,6 +4544,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-cloud-workstations/ + release_exclude_paths: + - packages/google-cloud-workstations/.repo-metadata.json + - packages/google-cloud-workstations/docs/README.rst tag_format: '{id}-v{version}' - id: google-crc32c version: 1.8.0 @@ -3850,6 +4556,9 @@ libraries: - packages/google-crc32c preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-crc32c/.repo-metadata.json + - packages/google-crc32c/docs/README.rst tag_format: '{id}-v{version}' - id: google-geo-type version: 0.6.0 @@ -3865,6 +4574,9 @@ libraries: - tests/unit/gapic/type/test_type.py remove_regex: - packages/google-geo-type + release_exclude_paths: + - packages/google-geo-type/.repo-metadata.json + - packages/google-geo-type/docs/README.rst tag_format: 
'{id}-v{version}' - id: google-maps-addressvalidation version: 0.6.0 @@ -3879,6 +4591,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-addressvalidation + release_exclude_paths: + - packages/google-maps-addressvalidation/.repo-metadata.json + - packages/google-maps-addressvalidation/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-areainsights version: 0.4.0 @@ -3893,6 +4608,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-areainsights + release_exclude_paths: + - packages/google-maps-areainsights/.repo-metadata.json + - packages/google-maps-areainsights/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-fleetengine version: 0.5.0 @@ -3907,6 +4625,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-fleetengine + release_exclude_paths: + - packages/google-maps-fleetengine/.repo-metadata.json + - packages/google-maps-fleetengine/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-fleetengine-delivery version: 0.5.0 @@ -3921,6 +4642,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-fleetengine-delivery + release_exclude_paths: + - packages/google-maps-fleetengine-delivery/.repo-metadata.json + - packages/google-maps-fleetengine-delivery/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-geocode version: 0.2.0 @@ -3939,6 +4663,9 @@ libraries: - tests/system remove_regex: - packages/google-maps-geocode + release_exclude_paths: + - packages/google-maps-geocode/.repo-metadata.json + - packages/google-maps-geocode/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-mapsplatformdatasets version: 0.7.0 @@ -3953,6 +4680,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-mapsplatformdatasets + release_exclude_paths: + - packages/google-maps-mapsplatformdatasets/.repo-metadata.json + - packages/google-maps-mapsplatformdatasets/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-navconnect 
version: 0.1.0 @@ -3971,6 +4701,9 @@ libraries: - tests/system remove_regex: - packages/google-maps-navconnect + release_exclude_paths: + - packages/google-maps-navconnect/.repo-metadata.json + - packages/google-maps-navconnect/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-places version: 0.8.0 @@ -3985,6 +4718,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-places + release_exclude_paths: + - packages/google-maps-places/.repo-metadata.json + - packages/google-maps-places/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-routeoptimization version: 0.4.0 @@ -3999,6 +4735,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-routeoptimization + release_exclude_paths: + - packages/google-maps-routeoptimization/.repo-metadata.json + - packages/google-maps-routeoptimization/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-routing version: 0.10.0 @@ -4013,6 +4752,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-routing + release_exclude_paths: + - packages/google-maps-routing/.repo-metadata.json + - packages/google-maps-routing/docs/README.rst tag_format: '{id}-v{version}' - id: google-maps-solar version: 0.5.0 @@ -4027,6 +4769,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-maps-solar + release_exclude_paths: + - packages/google-maps-solar/.repo-metadata.json + - packages/google-maps-solar/docs/README.rst tag_format: '{id}-v{version}' - id: google-resumable-media version: 2.8.2 @@ -4036,6 +4781,9 @@ libraries: - packages/google-resumable-media preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/google-resumable-media/.repo-metadata.json + - packages/google-resumable-media/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-css version: 0.5.0 @@ -4050,6 +4798,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-css/ + release_exclude_paths: + - 
packages/google-shopping-css/.repo-metadata.json + - packages/google-shopping-css/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-accounts version: 1.5.0 @@ -4066,6 +4817,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-accounts/ + release_exclude_paths: + - packages/google-shopping-merchant-accounts/.repo-metadata.json + - packages/google-shopping-merchant-accounts/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-conversions version: 1.3.0 @@ -4082,6 +4836,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-conversions/ + release_exclude_paths: + - packages/google-shopping-merchant-conversions/.repo-metadata.json + - packages/google-shopping-merchant-conversions/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-datasources version: 1.4.0 @@ -4098,6 +4855,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-datasources/ + release_exclude_paths: + - packages/google-shopping-merchant-datasources/.repo-metadata.json + - packages/google-shopping-merchant-datasources/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-inventories version: 1.3.0 @@ -4114,6 +4874,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-inventories/ + release_exclude_paths: + - packages/google-shopping-merchant-inventories/.repo-metadata.json + - packages/google-shopping-merchant-inventories/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-issueresolution version: 1.3.0 @@ -4130,6 +4893,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-issueresolution/ + release_exclude_paths: + - packages/google-shopping-merchant-issueresolution/.repo-metadata.json + - packages/google-shopping-merchant-issueresolution/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-lfp 
version: 1.3.0 @@ -4146,6 +4912,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-lfp/ + release_exclude_paths: + - packages/google-shopping-merchant-lfp/.repo-metadata.json + - packages/google-shopping-merchant-lfp/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-notifications version: 1.3.0 @@ -4162,6 +4931,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-notifications/ + release_exclude_paths: + - packages/google-shopping-merchant-notifications/.repo-metadata.json + - packages/google-shopping-merchant-notifications/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-ordertracking version: 1.3.0 @@ -4178,6 +4950,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-ordertracking/ + release_exclude_paths: + - packages/google-shopping-merchant-ordertracking/.repo-metadata.json + - packages/google-shopping-merchant-ordertracking/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-products version: 1.5.0 @@ -4194,6 +4969,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-products/ + release_exclude_paths: + - packages/google-shopping-merchant-products/.repo-metadata.json + - packages/google-shopping-merchant-products/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-productstudio version: 0.4.0 @@ -4208,6 +4986,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-productstudio/ + release_exclude_paths: + - packages/google-shopping-merchant-productstudio/.repo-metadata.json + - packages/google-shopping-merchant-productstudio/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-promotions version: 1.3.0 @@ -4224,6 +5005,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-promotions/ + release_exclude_paths: + - 
packages/google-shopping-merchant-promotions/.repo-metadata.json + - packages/google-shopping-merchant-promotions/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-quota version: 1.4.0 @@ -4240,6 +5024,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-quota/ + release_exclude_paths: + - packages/google-shopping-merchant-quota/.repo-metadata.json + - packages/google-shopping-merchant-quota/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-reports version: 1.3.0 @@ -4258,6 +5045,9 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-reports/ + release_exclude_paths: + - packages/google-shopping-merchant-reports/.repo-metadata.json + - packages/google-shopping-merchant-reports/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-merchant-reviews version: 0.5.0 @@ -4272,13 +5062,15 @@ libraries: - docs/CHANGELOG.md remove_regex: - packages/google-shopping-merchant-reviews/ + release_exclude_paths: + - packages/google-shopping-merchant-reviews/.repo-metadata.json + - packages/google-shopping-merchant-reviews/docs/README.rst tag_format: '{id}-v{version}' - id: google-shopping-type version: 1.4.0 last_generated_commit: 6df3ecf4fd43b64826de6a477d1a535ec18b0d7c apis: - path: google/shopping/type - service_config: "" source_roots: - packages/google-shopping-type preserve_regex: @@ -4287,6 +5079,9 @@ libraries: - tests/unit/gapic/type/test_type.py remove_regex: - packages/google-shopping-type/ + release_exclude_paths: + - packages/google-shopping-type/.repo-metadata.json + - packages/google-shopping-type/docs/README.rst tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.74.0 @@ -4295,15 +5090,12 @@ libraries: - path: google/api service_config: serviceconfig.yaml - path: google/cloud - service_config: "" - path: google/cloud/location service_config: cloud.yaml - path: google/logging/type - service_config: "" - path: 
google/rpc service_config: rpc_publish.yaml - path: google/rpc/context - service_config: "" - path: google/type service_config: type.yaml source_roots: @@ -4314,6 +5106,9 @@ libraries: - .repo-metadata.json - README.rst - docs/summary_overview.md + release_exclude_paths: + - packages/googleapis-common-protos/.repo-metadata.json + - packages/googleapis-common-protos/docs/README.rst tag_format: '{id}-v{version}' - id: grafeas version: 1.22.0 @@ -4331,6 +5126,9 @@ libraries: - grafeas/grafeas/grafeas_v1/types.py remove_regex: - packages/grafeas + release_exclude_paths: + - packages/grafeas/.repo-metadata.json + - packages/grafeas/docs/README.rst tag_format: '{id}-v{version}' - id: grpc-google-iam-v1 version: 0.14.4 @@ -4346,6 +5144,9 @@ libraries: - .repo-metadata.json - README.rst - docs/summary_overview.md + release_exclude_paths: + - packages/grpc-google-iam-v1/.repo-metadata.json + - packages/grpc-google-iam-v1/docs/README.rst tag_format: '{id}-v{version}' - id: pandas-gbq version: 0.34.1 @@ -4355,6 +5156,9 @@ libraries: - packages/pandas-gbq preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/pandas-gbq/.repo-metadata.json + - packages/pandas-gbq/docs/README.rst tag_format: '{id}-v{version}' - id: proto-plus version: 1.27.2 @@ -4364,6 +5168,9 @@ libraries: - packages/proto-plus preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/proto-plus/.repo-metadata.json + - packages/proto-plus/docs/README.rst tag_format: '{id}-v{version}' - id: sqlalchemy-bigquery version: 1.16.0 @@ -4373,6 +5180,9 @@ libraries: - packages/sqlalchemy-bigquery preserve_regex: [] remove_regex: [] + release_exclude_paths: + - packages/sqlalchemy-bigquery/.repo-metadata.json + - packages/sqlalchemy-bigquery/docs/README.rst tag_format: '{id}-v{version}' - id: sqlalchemy-spanner version: 1.17.3 @@ -4382,4 +5192,7 @@ libraries: - packages/sqlalchemy-spanner preserve_regex: [] remove_regex: [] + release_exclude_paths: + - 
packages/sqlalchemy-spanner/.repo-metadata.json + - packages/sqlalchemy-spanner/docs/README.rst tag_format: '{id}-v{version}' diff --git a/librarian.yaml b/librarian.yaml new file mode 100644 index 000000000000..a1af40bf6d6f --- /dev/null +++ b/librarian.yaml @@ -0,0 +1,4716 @@ +# Copyright 2026 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +language: python +version: v1.0.2-0.20260407070428-2f1c4b11f1bd +repo: googleapis/google-cloud-python +sources: + googleapis: + commit: cd090841ab172574e740c214c99df00aef9c0dee + sha256: 08e4b7744dc23b6e3320a3f1d05db9f40853aaf1089d06bfb8d79044b7a66f21 +release: + ignored_changes: + - .repo-metadata.json + - docs/README.rst +default: + output: packages + tag_format: '{name}: v{version}' + python: + common_gapic_paths: + - samples/generated_samples + - tests/unit/gapic + - testing + - '{neutral-source}/__init__.py' + - '{neutral-source}/gapic_version.py' + - '{neutral-source}/py.typed' + - tests/unit/__init__.py + - tests/__init__.py + - setup.py + - noxfile.py + - .coveragerc + - .flake8 + - .repo-metadata.json + - mypy.ini + - README.rst + - LICENSE + - MANIFEST.in + - docs/_static/custom.css + - docs/_templates/layout.html + - docs/conf.py + - docs/index.rst + - docs/multiprocessing.rst + - docs/README.rst + - docs/summary_overview.md + library_type: GAPIC_AUTO +libraries: + - name: bigframes + version: 2.39.0 + python: + library_type: INTEGRATION + name_pretty_override: A unified Python API in BigQuery + 
product_documentation_override: https://cloud.google.com/bigquery + api_shortname_override: bigquery + api_id_override: bigquery.googleapis.com + client_documentation_override: https://cloud.google.com/python/docs/reference/bigframes/latest + issue_tracker_override: https://github.com/googleapis/python-bigquery-dataframes/issues + - name: bigquery-magics + version: 0.12.2 + python: + library_type: INTEGRATION + name_pretty_override: Google BigQuery connector for Jupyter and IPython + product_documentation_override: https://cloud.google.com/bigquery + api_id_override: bigquery.googleapis.com + client_documentation_override: https://googleapis.dev/python/bigquery-magics/latest/ + issue_tracker_override: https://github.com/googleapis/python-bigquery-magics/issues + - name: db-dtypes + version: 1.5.1 + description_override: Pandas extension data types for data from SQL systems such as BigQuery. + python: + library_type: INTEGRATION + name_pretty_override: Pandas Data Types for SQL systems (BigQuery, Spanner) + product_documentation_override: https://pandas.pydata.org/pandas-docs/stable/ecosystem.html#ecosystem-extensions + api_id_override: bigquery.googleapis.com + client_documentation_override: https://googleapis.dev/python/db-dtypes/latest/index.html + - name: django-google-spanner + version: 4.0.3 + python: + library_type: INTEGRATION + name_pretty_override: Cloud Spanner Django + product_documentation_override: https://cloud.google.com/spanner/docs/ + api_shortname_override: django-google-spanner + issue_tracker_override: https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open + - name: gapic-generator + version: 1.30.14 + python: + library_type: CORE + name_pretty_override: Google API Client Generator for Python + client_documentation_override: https://gapic-generator-python.readthedocs.io/en/stable/ + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + skip_readme_copy: true + - name: gcp-sphinx-docfx-yaml + 
version: 3.2.5 + python: + library_type: OTHER + name_pretty_override: Sphinx DocFX YAML Generator + product_documentation_override: https://github.com/googleapis/sphinx-docfx-yaml + client_documentation_override: https://github.com/googleapis/sphinx-docfx-yaml + issue_tracker_override: https://github.com/googleapis/sphinx-docfx-yaml/issues + skip_readme_copy: true + - name: google-ads-admanager + version: 0.9.0 + apis: + - path: google/ads/admanager/v1 + description_override: Manage your Ad Manager inventory, run reports and more. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + default_version: v1 + - name: google-ads-datamanager + version: 0.8.0 + apis: + - path: google/ads/datamanager/v1 + description_override: |- + A unified ingestion API for data partners, agencies and advertisers to + connect first-party data across Google advertising products. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Data Manager API + client_documentation_override: https://cloud.google.com/python/docs/reference/google-ads-datamanager/latest + default_version: v1 + - name: google-ads-marketingplatform-admin + version: 0.5.0 + apis: + - path: google/marketingplatform/admin/v1alpha + description_override: The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/marketingplatform/admin/v1alpha: + - python-gapic-namespace=google.ads + - python-gapic-name=marketingplatform_admin + - warehouse-package-name=google-ads-marketingplatform-admin + name_pretty_override: Google Marketing Platform Admin API + api_shortname_override: marketingplatformadmin + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1alpha + - name: google-ai-generativelanguage + version: 0.11.0 + apis: + - path: google/ai/generativelanguage/v1 + - path: google/ai/generativelanguage/v1beta3 + - path: google/ai/generativelanguage/v1beta2 + - path: google/ai/generativelanguage/v1beta + - path: google/ai/generativelanguage/v1alpha + description_override: The Gemini API allows developers to build generative AI applications using Gemini models. Gemini is our most capable model, built from the ground up to be multimodal. It can generalize and seamlessly understand, operate across, and combine different types of information including language, images, audio, video, and code. You can use the Gemini API for use cases like reasoning across text and images, content generation, dialogue agents, summarization and classification systems, and more. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Generative Language API + metadata_name_override: generativelanguage + default_version: v1beta + - name: google-analytics-admin + version: 0.28.0 + apis: + - path: google/analytics/admin/v1beta + - path: google/analytics/admin/v1alpha + description_override: allows you to manage Google Analytics accounts and properties. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/analytics/admin/v1alpha: + - autogen-snippets=False + name_pretty_override: Analytics Admin + metadata_name_override: analyticsadmin + default_version: v1alpha + - name: google-analytics-data + version: 0.21.0 + apis: + - path: google/analytics/data/v1beta + - path: google/analytics/data/v1alpha + description_override: provides programmatic methods to access report data in Google Analytics App+Web properties. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Analytics Data + metadata_name_override: analyticsdata + default_version: v1beta + - name: google-api-core + version: 2.30.2 + python: + library_type: CORE + name_pretty_override: Google API client core library + skip_readme_copy: true + - name: google-apps-card + version: 0.6.0 + apis: + - path: google/apps/card/v1 + description_override: Google Apps Card Protos + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/card_v1/test_card.py + python: + name_pretty_override: Google Apps Card Protos + api_shortname_override: card + api_id_override: card.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-apps-chat + version: 0.7.0 + apis: + - path: google/chat/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/chat/v1: + - proto-plus-deps=google.apps.card.v1 + - python-gapic-namespace=google.apps + - warehouse-package-name=google-apps-chat + name_pretty_override: Chat API + product_documentation_override: https://developers.google.com/chat/ + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-apps-events-subscriptions + version: 0.5.0 + apis: + - path: google/apps/events/subscriptions/v1 + - path: google/apps/events/subscriptions/v1beta + description_override: The Google Workspace Events API lets you 
subscribe to events and manage change notifications across Google Workspace applications. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/apps/events/subscriptions/v1: + - python-gapic-namespace=google.apps + - python-gapic-name=events_subscriptions + google/apps/events/subscriptions/v1beta: + - python-gapic-namespace=google.apps + - python-gapic-name=events_subscriptions + name_pretty_override: Google Workspace Events API + api_shortname_override: subscriptions + api_id_override: subscriptions.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-apps-meet + version: 0.4.0 + apis: + - path: google/apps/meet/v2 + - path: google/apps/meet/v2beta + description_override: Create and manage meetings in Google Meet. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Meet API + default_version: v2 + - name: google-apps-script-type + version: 0.6.0 + apis: + - path: google/apps/script/type + - path: google/apps/script/type/gmail + - path: google/apps/script/type/docs + - path: google/apps/script/type/drive + - path: google/apps/script/type/sheets + - path: google/apps/script/type/calendar + - path: google/apps/script/type/slides + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/calendar/test_calendar.py + - tests/unit/gapic/docs/test_docs.py + - tests/unit/gapic/drive/test_drive.py + - tests/unit/gapic/gmail/test_gmail.py + - tests/unit/gapic/sheets/test_sheets.py + - tests/unit/gapic/slides/test_slides.py + - tests/unit/gapic/type/test_type.py + python: + opt_args_by_api: + google/apps/script/type/calendar: + - proto-plus-deps=google.apps.script.type + google/apps/script/type/docs: + - proto-plus-deps=google.apps.script.type + google/apps/script/type/drive: + - proto-plus-deps=google.apps.script.type + google/apps/script/type/gmail: + - proto-plus-deps=google.apps.script.type + 
google/apps/script/type/sheets: + - proto-plus-deps=google.apps.script.type + google/apps/script/type/slides: + - proto-plus-deps=google.apps.script.type + name_pretty_override: Google Apps Script Type Protos + api_shortname_override: type + api_id_override: type.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: type + default_version: apiVersion + - name: google-area120-tables + version: 0.14.0 + apis: + - path: google/area120/tables/v1alpha1 + description_override: provides programmatic methods to the Area 120 Tables API. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Area 120 Tables + metadata_name_override: area120tables + default_version: v1alpha1 + - name: google-auth + version: 2.49.1 + python: + library_type: AUTH + name_pretty_override: Google Auth Python Library + issue_tracker_override: https://github.com/googleapis/google-auth-library-python/issues + skip_readme_copy: true + - name: google-auth-httplib2 + version: 0.3.1 + python: + library_type: AUTH + name_pretty_override: Google Auth httplib2 + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + - name: google-auth-oauthlib + version: 1.3.1 + python: + library_type: AUTH + name_pretty_override: Google Auth OAuthlib + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + skip_readme_copy: true + - name: google-cloud-access-approval + version: 1.19.0 + apis: + - path: google/cloud/accessapproval/v1 + description_override: enables controlling access to your organization's data by Google personnel. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/accessapproval/v1: + - warehouse-package-name=google-cloud-access-approval + product_documentation_override: https://cloud.google.com/access-approval + metadata_name_override: accessapproval + default_version: v1 + - name: google-cloud-access-context-manager + version: 0.4.0 + apis: + - path: google/identity/accesscontextmanager/v1 + - path: google/identity/accesscontextmanager/type + python: + proto_only_apis: + - google/identity/accesscontextmanager/v1 + - google/identity/accesscontextmanager/type + product_documentation_override: https://cloud.google.com/access-context-manager/docs/overview + client_documentation_override: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: accesscontextmanager + default_version: apiVersion + - name: google-cloud-advisorynotifications + version: 0.6.0 + apis: + - path: google/cloud/advisorynotifications/v1 + description_override: Advisory Notifications provides well-targeted, timely, and compliant communications about critical security and privacy events in the Google Cloud console and allows you to securely investigate the event, take action, and get support. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: advisorynotifications + default_version: v1 + - name: google-cloud-alloydb + version: 0.9.0 + apis: + - path: google/cloud/alloydb/v1 + - path: google/cloud/alloydb/v1beta + - path: google/cloud/alloydb/v1alpha + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/alloydb/ + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: alloydb + default_version: v1 + - name: google-cloud-alloydb-connectors + version: 0.4.0 + apis: + - path: google/cloud/alloydb/connectors/v1 + - path: google/cloud/alloydb/connectors/v1beta + - path: google/cloud/alloydb/connectors/v1alpha + description_override: provides enterprise-grade performance and availability while maintaining 100% compatibility with open-source PostgreSQL. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/connectors_v1/test_connectors.py + python: + api_shortname_override: connectors + metadata_name_override: connectors + default_version: v1 + - name: google-cloud-api-gateway + version: 1.15.0 + apis: + - path: google/cloud/apigateway/v1 + description_override: enables you to provide secure access to your backend services through a well-defined REST API that is consistent across all of your services, regardless of the service implementation. Clients consume your REST APIS to implement standalone apps for a mobile device or tablet, through apps running in a browser, or through any other type of app that can make a request to an HTTP endpoint. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/apigateway/v1: + - warehouse-package-name=google-cloud-api-gateway + metadata_name_override: apigateway + default_version: v1 + - name: google-cloud-api-keys + version: 0.8.0 + apis: + - path: google/api/apikeys/v2 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/api/apikeys/v2: + - python-gapic-name=api_keys + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-api-keys + metadata_name_override: apikeys + default_version: v2 + - name: google-cloud-apigee-connect + version: 1.15.0 + apis: + - path: google/cloud/apigeeconnect/v1 + description_override: allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/apigeeconnect/v1: + - warehouse-package-name=google-cloud-apigee-connect + product_documentation_override: https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect + metadata_name_override: apigeeconnect + default_version: v1 + - name: google-cloud-apigee-registry + version: 0.9.0 + apis: + - path: google/cloud/apigeeregistry/v1 + description_override: allows teams to upload and share machine-readable descriptions of APIs that are in use and in development. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/apigeeregistry/v1: + - warehouse-package-name=google-cloud-apigee-registry + - python-gapic-namespace=google.cloud + - python-gapic-name=apigee_registry + name_pretty_override: Apigee Registry API + product_documentation_override: https://cloud.google.com/apigee/docs/api-hub/get-started-registry-api + metadata_name_override: apigeeregistry + default_version: v1 + - name: google-cloud-apihub + version: 0.6.0 + apis: + - path: google/cloud/apihub/v1 + description_override: API hub lets you consolidate and organize information about all of the APIs of interest to your organization. API hub lets you capture critical information about APIs that allows developers to discover and evaluate them easily and leverage the work of other teams wherever possible. API platform teams can use API hub to have visibility into and manage their portfolio of APIs. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: API Hub API + product_documentation_override: https://cloud.google.com/apigee/docs/apihub/what-is-api-hub + default_version: v1 + - name: google-cloud-apiregistry + version: 0.2.0 + apis: + - path: google/cloud/apiregistry/v1beta + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud API Registry API + product_documentation_override: https://docs.cloud.google.com/api-registry/docs/overview + default_version: v1beta + - name: google-cloud-appengine-admin + version: 1.17.0 + apis: + - path: google/appengine/v1 + description_override: allows you to manage your App Engine applications. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/appengine/v1: + - warehouse-package-name=google-cloud-appengine-admin + - python-gapic-namespace=google.cloud + - python-gapic-name=appengine_admin + product_documentation_override: https://cloud.google.com/appengine/docs/admin-api/ + metadata_name_override: appengine + default_version: v1 + - name: google-cloud-appengine-logging + version: 1.9.0 + apis: + - path: google/appengine/logging/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/appengine_logging_v1/test_appengine_logging_v1.py + python: + library_type: OTHER + opt_args_by_api: + google/appengine/logging/v1: + - warehouse-package-name=google-cloud-appengine-logging + - python-gapic-namespace=google.cloud + - python-gapic-name=appengine_logging + name_pretty_override: App Engine Logging Protos + product_documentation_override: https://cloud.google.com/logging/docs/reference/v2/rpc/google.appengine.logging.v1 + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: appenginelogging + default_version: v1 + - name: google-cloud-apphub + version: 0.4.0 + apis: + - path: google/cloud/apphub/v1 + description_override: 'null ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: App Hub API + product_documentation_override: https://cloud.google.com/app-hub/docs/overview + default_version: v1 + - name: google-cloud-artifact-registry + version: 1.21.0 + apis: + - path: google/devtools/artifactregistry/v1 + - path: google/devtools/artifactregistry/v1beta2 + description_override: provides a single place for your organization to manage container images and language packages (such as Maven and npm). It is fully integrated with Google Cloud's tooling and runtimes and comes with support for native artifact protocols. This makes it simple to integrate it with your CI/CD tooling to set up automated pipelines. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/devtools/artifactregistry/v1: + - python-gapic-name=artifactregistry + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-artifact-registry + google/devtools/artifactregistry/v1beta2: + - python-gapic-name=artifactregistry + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-artifact-registry + metadata_name_override: artifactregistry + default_version: v1 + - name: google-cloud-asset + version: 4.3.0 + apis: + - path: google/cloud/asset/v1 + - path: google/cloud/asset/v1p5beta1 + - path: google/cloud/asset/v1p2beta1 + - path: google/cloud/asset/v1p1beta1 + description_override: provides inventory services based on a time series database. This database keeps a five week history of Google Cloud asset metadata. The Cloud Asset Inventory export service allows you to export all asset metadata at a certain timestamp or export event change history during a timeframe. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/asset/v1: + - proto-plus-deps=google.cloud.osconfig.v1 + name_pretty_override: Cloud Asset Inventory + product_documentation_override: https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview + metadata_name_override: cloudasset + default_version: v1 + - name: google-cloud-assured-workloads + version: 2.3.0 + apis: + - path: google/cloud/assuredworkloads/v1 + - path: google/cloud/assuredworkloads/v1beta1 + description_override: allows you to secure your government workloads and accelerate your path to running compliant workloads on Google Cloud with Assured Workloads for Government. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/assuredworkloads/v1: + - warehouse-package-name=google-cloud-assured-workloads + google/cloud/assuredworkloads/v1beta1: + - warehouse-package-name=google-cloud-assured-workloads + name_pretty_override: Assured Workloads for Government + metadata_name_override: assuredworkloads + default_version: v1 + - name: google-cloud-audit-log + version: 0.5.0 + apis: + - path: google/cloud/audit + python: + library_type: OTHER + proto_only_apis: + - google/cloud/audit + name_pretty_override: Audit Log API + product_documentation_override: https://cloud.google.com/logging/docs/audit + api_shortname_override: auditlog + client_documentation_override: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log + metadata_name_override: auditlog + default_version: apiVersion + - name: google-cloud-auditmanager + version: 0.2.0 + apis: + - path: google/cloud/auditmanager/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Audit Manager API + default_version: v1 + - name: google-cloud-automl + version: 2.19.0 + apis: + - path: google/cloud/automl/v1 + - path: google/cloud/automl/v1beta1 + description_override: '**AutoML API Python Client is now available in Vertex AI. Please visit** `Vertex SDK for Python `_ **for the new Python Vertex AI client.** Vertex AI is our next generation AI Platform, with many new features that are unavailable in the current platform. `Migrate your resources to Vertex AI `_ to get the latest machine learning features, simplify end-to-end journeys, and productionize models with MLOps. The `Cloud AutoML API `_ is a suite of machine learning products that enables developers with limited machine learning expertise to train high-quality models specific to their business needs, by leveraging Google''s state-of-the-art transfer learning, and Neural Architecture Search technology.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/automl_v1beta1/tables.rst + - google/cloud/automl_v1beta1/services/tables + - google/cloud/automl_v1beta1/services/tables/__init__.py + - google/cloud/automl_v1beta1/services/tables/gcs_client.py + - google/cloud/automl_v1beta1/services/tables/tables_client.py + - samples/README + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + - tests/unit/test_gcs_client_v1beta1.py + - tests/unit/test_tables_client_v1beta1.py + python: + library_type: GAPIC_COMBO + product_documentation_override: https://cloud.google.com/automl/docs/ + metadata_name_override: automl + default_version: v1 + - name: google-cloud-backupdr + version: 0.9.0 + apis: + - path: google/cloud/backupdr/v1 + description_override: Backup and DR Service ensures that your data is managed, protected, and accessible using both hybrid and cloud-based backup/recovery appliances that are managed using the Backup and DR management console. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Backup and DR Service API + product_documentation_override: https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr + metadata_name_override: backupdr + default_version: v1 + - name: google-cloud-bare-metal-solution + version: 1.13.0 + apis: + - path: google/cloud/baremetalsolution/v2 + description_override: Bring your Oracle workloads to Google Cloud with Bare Metal Solution and jumpstart your cloud journey with minimal risk. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/baremetalsolution/v2: + - python-gapic-name=bare_metal_solution + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-bare-metal-solution + metadata_name_override: baremetalsolution + default_version: v2 + - name: google-cloud-batch + version: 0.21.0 + apis: + - path: google/cloud/batch/v1 + - path: google/cloud/batch/v1alpha + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Batch + metadata_name_override: batch + default_version: v1 + - name: google-cloud-beyondcorp-appconnections + version: 0.7.0 + apis: + - path: google/cloud/beyondcorp/appconnections/v1 + description_override: Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/beyondcorp/appconnections/v1: + - warehouse-package-name=google-cloud-beyondcorp-appconnections + - python-gapic-namespace=google.cloud + - python-gapic-name=beyondcorp_appconnections + name_pretty_override: BeyondCorp AppConnections + metadata_name_override: beyondcorpappconnections + default_version: v1 + - name: google-cloud-beyondcorp-appconnectors + version: 0.7.0 + apis: + - path: google/cloud/beyondcorp/appconnectors/v1 + description_override: Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/beyondcorp/appconnectors/v1: + - warehouse-package-name=google-cloud-beyondcorp-appconnectors + - python-gapic-namespace=google.cloud + - python-gapic-name=beyondcorp_appconnectors + name_pretty_override: BeyondCorp AppConnectors + metadata_name_override: beyondcorpappconnectors + default_version: v1 + - name: google-cloud-beyondcorp-appgateways + version: 0.7.0 + apis: + - path: google/cloud/beyondcorp/appgateways/v1 + description_override: Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/beyondcorp/appgateways/v1: + - warehouse-package-name=google-cloud-beyondcorp-appgateways + - python-gapic-namespace=google.cloud + - python-gapic-name=beyondcorp_appgateways + name_pretty_override: BeyondCorp AppGateways + metadata_name_override: beyondcorpappgateways + default_version: v1 + - name: google-cloud-beyondcorp-clientconnectorservices + version: 0.7.0 + apis: + - path: google/cloud/beyondcorp/clientconnectorservices/v1 + description_override: Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/beyondcorp/clientconnectorservices/v1: + - warehouse-package-name=google-cloud-beyondcorp-clientconnectorservices + - python-gapic-namespace=google.cloud + - python-gapic-name=beyondcorp_clientconnectorservices + name_pretty_override: BeyondCorp ClientConnectorServices + metadata_name_override: beyondcorpclientconnectorservices + default_version: v1 + - name: google-cloud-beyondcorp-clientgateways + version: 0.7.0 + apis: + - path: google/cloud/beyondcorp/clientgateways/v1 + description_override: Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/beyondcorp/clientgateways/v1: + - warehouse-package-name=google-cloud-beyondcorp-clientgateways + - python-gapic-namespace=google.cloud + - python-gapic-name=beyondcorp_clientgateways + name_pretty_override: BeyondCorp ClientGateways + metadata_name_override: beyondcorpclientgateways + default_version: v1 + - name: google-cloud-biglake + version: 0.3.0 + apis: + - path: google/cloud/biglake/v1 + description_override: The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: BigLake API + product_documentation_override: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-biglake-hive + version: 0.1.0 + apis: + - path: google/cloud/biglake/hive/v1beta + description_override: |- + The BigLake API provides access to BigLake Metastore, a serverless, fully + managed, and highly available metastore for open-source data that can be + used for querying Apache Iceberg tables in BigQuery. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/biglake/hive/v1beta: + - python-gapic-namespace=google.cloud + - python-gapic-name=biglake_hive + - warehouse-package-name=google-cloud-biglake-hive + name_pretty_override: BigLake API + product_documentation_override: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore + default_version: v1beta + - name: google-cloud-bigquery + version: 3.41.0 + description_override: |- + is a fully managed, NoOps, low cost data analytics service. + Data can be streamed into BigQuery at millions of rows per second to enable real-time analysis. + With BigQuery you can easily deploy Petabyte-scale Databases. 
+ python: + library_type: GAPIC_COMBO + name_pretty_override: Google Cloud BigQuery + product_documentation_override: https://cloud.google.com/bigquery + api_shortname_override: bigquery + api_id_override: bigquery.googleapis.com + issue_tracker_override: https://issuetracker.google.com/savedsearches/559654 + metadata_name_override: bigquery + default_version: v2 + - name: google-cloud-bigquery-analyticshub + version: 0.8.0 + apis: + - path: google/cloud/bigquery/analyticshub/v1 + description_override: Analytics Hub is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost. Curate a library of internal and external assets, including unique datasets like Google Trends, backed by the power of BigQuery. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/bigquery/analyticshub/v1: + - python-gapic-name=bigquery_analyticshub + - python-gapic-namespace=google.cloud + name_pretty_override: BigQuery Analytics Hub + metadata_name_override: analyticshub + default_version: v1 + - name: google-cloud-bigquery-biglake + version: 0.7.0 + apis: + - path: google/cloud/bigquery/biglake/v1 + - path: google/cloud/bigquery/biglake/v1alpha1 + description_override: BigLake API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/bigquery/biglake/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_biglake + google/cloud/bigquery/biglake/v1alpha1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_biglake + name_pretty_override: BigLake API + product_documentation_override: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore + metadata_name_override: biglake + default_version: v1 + - name: google-cloud-bigquery-connection + version: 1.21.0 + apis: + - path: google/cloud/bigquery/connection/v1 + description_override: Manage BigQuery connections 
to external data sources. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/bigquery/connection/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_connection + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection + metadata_name_override: bigqueryconnection + default_version: v1 + - name: google-cloud-bigquery-data-exchange + version: 0.8.0 + apis: + - path: google/cloud/bigquery/dataexchange/v1beta1 + description_override: is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/bigquery/dataexchange/v1beta1: + - python-gapic-name=bigquery_data_exchange + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-bigquery-data-exchange + name_pretty_override: BigQuery Analytics Hub + product_documentation_override: https://cloud.google.com/bigquery/docs/analytics-hub-introduction + metadata_name_override: analyticshub + default_version: v1beta1 + - name: google-cloud-bigquery-datapolicies + version: 0.9.0 + apis: + - path: google/cloud/bigquery/datapolicies/v2 + - path: google/cloud/bigquery/datapolicies/v1 + - path: google/cloud/bigquery/datapolicies/v2beta1 + - path: google/cloud/bigquery/datapolicies/v1beta1 + description_override: Allows users to manage BigQuery data policies. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/bigquery/datapolicies/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_datapolicies + - transport=grpc+rest + google/cloud/bigquery/datapolicies/v1beta1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_datapolicies + - transport=grpc + google/cloud/bigquery/datapolicies/v2: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_datapolicies + - transport=grpc+rest + google/cloud/bigquery/datapolicies/v2beta1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_datapolicies + - transport=grpc+rest + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/bigquerydatapolicy/rest + metadata_name_override: bigquerydatapolicy + default_version: v1 + - name: google-cloud-bigquery-datatransfer + version: 3.22.0 + apis: + - path: google/cloud/bigquery/datatransfer/v1 + description_override: allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/bigquery/datatransfer/v1: + - python-gapic-name=bigquery_datatransfer + - python-gapic-namespace=google.cloud + metadata_name_override: bigquerydatatransfer + default_version: v1 + - name: google-cloud-bigquery-logging + version: 1.9.0 + apis: + - path: google/cloud/bigquery/logging/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/bigquery_logging_v1/test_bigquery_logging_v1.py + python: + library_type: OTHER + opt_args_by_api: + google/cloud/bigquery/logging/v1: + - warehouse-package-name=google-cloud-bigquery-logging + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_logging + name_pretty_override: BigQuery Logging Protos + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/auditlogs + api_shortname_override: bigquerylogging + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: bigquerylogging + default_version: v1 + - name: google-cloud-bigquery-migration + version: 0.14.0 + apis: + - path: google/cloud/bigquery/migration/v2 + - path: google/cloud/bigquery/migration/v2alpha + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/bigquery/migration/v2: + - python-gapic-name=bigquery_migration + - python-gapic-namespace=google.cloud + google/cloud/bigquery/migration/v2alpha: + - python-gapic-name=bigquery_migration + - python-gapic-namespace=google.cloud + name_pretty_override: Google BigQuery Migration + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/migration/ + metadata_name_override: bigquerymigration + default_version: v2 + - name: google-cloud-bigquery-reservation + version: 1.23.0 + apis: + - path: google/cloud/bigquery/reservation/v1 + description_override: Modify BigQuery flat-rate reservations. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/bigquery/reservation/v1: + - python-gapic-name=bigquery_reservation + - python-gapic-namespace=google.cloud + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/reservations + metadata_name_override: bigqueryreservation + default_version: v1 + - name: google-cloud-bigquery-storage + version: 2.37.0 + apis: + - path: google/cloud/bigquery/storage/v1 + - path: google/cloud/bigquery/storage/v1beta2 + - path: google/cloud/bigquery/storage/v1beta + - path: google/cloud/bigquery/storage/v1alpha + keep: + - CHANGELOG.md + - CONTRIBUTING.rst + - docs/CHANGELOG.md + - docs/bigquery_storage_v1/library.rst + - docs/bigquery_storage_v1beta2/library.rst + - docs/samples + - google/cloud/bigquery_storage_v1/client.py + - google/cloud/bigquery_storage_v1/exceptions.py + - google/cloud/bigquery_storage_v1/gapic_types.py + - google/cloud/bigquery_storage_v1/reader.py + - google/cloud/bigquery_storage_v1/writer.py + - google/cloud/bigquery_storage_v1beta2/client.py + - google/cloud/bigquery_storage_v1beta2/exceptions.py + - google/cloud/bigquery_storage_v1beta2/writer.py + - samples/__init__.py + - samples/conftest.py + - samples/pyarrow + - samples/pyarrow/__init__.py + - samples/pyarrow/append_rows_with_arrow.py + - samples/pyarrow/append_rows_with_arrow_test.py + - samples/pyarrow/noxfile.py + - samples/pyarrow/requirements-test.txt + - samples/pyarrow/requirements.txt + - samples/pyarrow/test_generate_write_requests.py + - samples/quickstart + - samples/quickstart/__init__.py + - samples/quickstart/noxfile.py + - samples/quickstart/quickstart.py + - samples/quickstart/quickstart_test.py + - samples/quickstart/requirements-test.txt + - samples/quickstart/requirements.txt + - samples/snippets + - samples/snippets/__init__.py + - samples/snippets/append_rows_pending.py + - 
samples/snippets/append_rows_pending_test.py + - samples/snippets/append_rows_proto2.py + - samples/snippets/append_rows_proto2_test.py + - samples/snippets/conftest.py + - samples/snippets/customer_record.proto + - samples/snippets/customer_record_pb2.py + - samples/snippets/customer_record_schema.json + - samples/snippets/noxfile.py + - samples/snippets/requirements-test.txt + - samples/snippets/requirements.txt + - samples/snippets/sample_data.proto + - samples/snippets/sample_data_pb2.py + - samples/snippets/sample_data_schema.json + - samples/to_dataframe + - samples/to_dataframe/.nox + - samples/to_dataframe/.nox/format + - samples/to_dataframe/.nox/format/bin + - samples/to_dataframe/.nox/format/lib + - samples/to_dataframe/.nox/format/lib/python3.14 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/black + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/black-22.3.0.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/blackd + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/blib2to3 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/blib2to3/pgen2 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/click + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/click-8.3.1.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/click-8.3.1.dist-info/licenses + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort-5.10.1.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort/_future + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort/_vendored + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort/_vendored/tomli + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort/deprecated + - 
samples/to_dataframe/.nox/format/lib/python3.14/site-packages/isort/stdlibs + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/mypy_extensions-1.1.0.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/mypy_extensions-1.1.0.dist-info/licenses + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec-1.0.4.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec-1.0.4.dist-info/licenses + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/_backends + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/_backends/hyperscan + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/_backends/re2 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/_backends/simple + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/patterns + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pathspec/patterns/gitignore + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/cachecontrol + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/certifi + - 
samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/dependency_groups + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/distlib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/distro + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/idna + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/msgpack + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/packaging + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/pkg_resources + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/platformdirs + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/pygments + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/pyproject_hooks + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/requests + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/resolvelib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/rich + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/tomli + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/tomli_w + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/truststore + - 
samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip-25.3.dist-info/licenses/src/pip/_vendor/urllib3 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/cli + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/commands + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/distributions + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/index + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/locations + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/metadata + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/metadata/importlib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/models + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/network + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/operations + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/operations/build + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/operations/install + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/req + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/resolution + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/resolution/legacy + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/resolution/resolvelib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/utils + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_internal/vcs + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor + - 
samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/cachecontrol + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/cachecontrol/caches + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/certifi + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/dependency_groups + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/distlib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/distro + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/idna + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/msgpack + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/packaging + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/packaging/licenses + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pkg_resources + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/platformdirs + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pygments + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pygments/filters + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pygments/formatters + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pygments/lexers + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pygments/styles + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pyproject_hooks + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/pyproject_hooks/_in_process + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/requests + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/resolvelib + - 
samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/resolvelib/resolvers + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/rich + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/tomli + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/tomli_w + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/truststore + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3 + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3/contrib + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3/contrib/_securetransport + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3/packages + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3/packages/backports + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/pip/_vendor/urllib3/util + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/platformdirs + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/platformdirs-4.5.1.dist-info + - samples/to_dataframe/.nox/format/lib/python3.14/site-packages/platformdirs-4.5.1.dist-info/licenses + - samples/to_dataframe/__init__.py + - samples/to_dataframe/jupyter_test.py + - samples/to_dataframe/noxfile.py + - samples/to_dataframe/read_query_results.py + - samples/to_dataframe/read_query_results_test.py + - samples/to_dataframe/read_table_bigquery.py + - samples/to_dataframe/read_table_bigquery_test.py + - samples/to_dataframe/read_table_bqstorage.py + - samples/to_dataframe/read_table_bqstorage_test.py + - samples/to_dataframe/requirements-test.txt + - samples/to_dataframe/requirements.txt + - tests/system + - tests/system/__init__.py + - tests/system/assets + - tests/system/assets/people_data.csv + - tests/system/assets/public_samples_copy.sql + - tests/system/conftest.py + - 
tests/system/helpers.py + - tests/system/reader + - tests/system/reader/__init__.py + - tests/system/reader/conftest.py + - tests/system/reader/test_reader.py + - tests/system/reader/test_reader_dataframe.py + - tests/system/resources + - tests/system/resources/README.md + - tests/system/resources/person.proto + - tests/system/resources/person_pb2.py + - tests/system/test_writer.py + - tests/unit/helpers.py + - tests/unit/test_packaging.py + - tests/unit/test_read_client_v1.py + - tests/unit/test_reader_v1.py + - tests/unit/test_reader_v1_arrow.py + - tests/unit/test_writer_v1.py + - tests/unit/test_writer_v1beta2.py + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/cloud/bigquery/storage/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_storage + google/cloud/bigquery/storage/v1alpha: + - python-gapic-name=bigquery_storage + - python-gapic-namespace=google.cloud + google/cloud/bigquery/storage/v1beta: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_storage + google/cloud/bigquery/storage/v1beta2: + - python-gapic-namespace=google.cloud + - python-gapic-name=bigquery_storage + name_pretty_override: Google BigQuery Storage + product_documentation_override: https://cloud.google.com/bigquery/docs/reference/storage/ + issue_tracker_override: https://issuetracker.google.com/savedsearches/559654 + metadata_name_override: bigquerystorage + default_version: v1 + - name: google-cloud-bigtable + version: 2.36.0 + python: + library_type: GAPIC_COMBO + name_pretty_override: Cloud Bigtable + product_documentation_override: https://cloud.google.com/bigtable + api_shortname_override: bigtable + api_id_override: bigtable.googleapis.com + issue_tracker_override: https://issuetracker.google.com/savedsearches/559777 + metadata_name_override: bigtable + default_version: v2 + - name: google-cloud-billing + version: 1.19.0 + apis: + - path: google/cloud/billing/v1 + description_override: allows developers to manage their 
billing accounts or browse the catalog of SKUs and pricing. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: cloudbilling + default_version: v1 + - name: google-cloud-billing-budgets + version: 1.20.0 + apis: + - path: google/cloud/billing/budgets/v1 + - path: google/cloud/billing/budgets/v1beta1 + description_override: The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/billing/budgets/v1: + - transport=grpc+rest + google/cloud/billing/budgets/v1beta1: + - transport=grpc + product_documentation_override: https://cloud.google.com/billing/docs/how-to/budget-api-overview + metadata_name_override: billingbudgets + default_version: v1 + - name: google-cloud-binary-authorization + version: 1.16.0 + apis: + - path: google/cloud/binaryauthorization/v1 + - path: google/cloud/binaryauthorization/v1beta1 + description_override: ' is a service on Google Cloud that provides centralized software supply-chain security for applications that run on Google Kubernetes Engine (GKE) and Anthos clusters on VMware' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/binaryauthorization/v1: + - warehouse-package-name=google-cloud-binary-authorization + google/cloud/binaryauthorization/v1beta1: + - warehouse-package-name=google-cloud-binary-authorization + metadata_name_override: binaryauthorization + default_version: v1 + - name: google-cloud-build + version: 3.36.0 + apis: + - path: google/devtools/cloudbuild/v2 + - path: google/devtools/cloudbuild/v1 + description_override: lets you build software quickly across all languages. Get complete control over defining custom workflows for building, testing, and deploying across multiple environments such as VMs, serverless, Kubernetes, or Firebase. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/devtools/cloudbuild/v1: + - python-gapic-namespace=google.cloud.devtools + - warehouse-package-name=google-cloud-build + google/devtools/cloudbuild/v2: + - warehouse-package-name=google-cloud-build + - python-gapic-namespace=google.cloud.devtools + product_documentation_override: https://cloud.google.com/cloud-build/docs/ + metadata_name_override: cloudbuild + default_version: v1 + - name: google-cloud-capacityplanner + version: 0.4.0 + apis: + - path: google/cloud/capacityplanner/v1beta + description_override: Provides programmatic access to Capacity Planner features. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Capacity Planner API + default_version: v1beta + - name: google-cloud-certificate-manager + version: 1.13.0 + apis: + - path: google/cloud/certificatemanager/v1 + description_override: lets you acquire and manage TLS (SSL) certificates for use with Cloud Load Balancing. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/certificatemanager/v1: + - python-gapic-name=certificate_manager + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-certificate-manager + product_documentation_override: https://cloud.google.com/python/docs/reference/certificatemanager/latest + metadata_name_override: certificatemanager + default_version: v1 + - name: google-cloud-ces + version: 0.4.0 + apis: + - path: google/cloud/ces/v1 + - path: google/cloud/ces/v1beta + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Gemini Enterprise for Customer Experience API + default_version: v1 + - name: google-cloud-channel + version: 1.27.0 + apis: + - path: google/cloud/channel/v1 + description_override: With Channel Services, Google Cloud partners and resellers have a single unified resale platform, with a unified resale catalog, customer management, order management, billing management, policy and authorization management, and cost management. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Channel Services + metadata_name_override: cloudchannel + default_version: v1 + - name: google-cloud-chronicle + version: 0.4.0 + apis: + - path: google/cloud/chronicle/v1 + description_override: The Google Cloud Security Operations API, popularly known as the Chronicle API, serves endpoints that enable security analysts to analyze and mitigate a security threat throughout its lifecycle + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Chronicle API + product_documentation_override: https://cloud.google.com/chronicle/docs/secops/secops-overview + default_version: v1 + - name: google-cloud-cloudcontrolspartner + version: 0.5.0 + apis: + - path: google/cloud/cloudcontrolspartner/v1 + - path: google/cloud/cloudcontrolspartner/v1beta + description_override: Provides insights about your customers and their Assured Workloads based on your Sovereign Controls by Partners offering. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Controls Partner API + product_documentation_override: https://cloud.google.com/sovereign-controls-by-partners/docs/sovereign-partners/reference/rest + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-cloudsecuritycompliance + version: 0.6.0 + apis: + - path: google/cloud/cloudsecuritycompliance/v1 + description_override: 'null ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Security Compliance API + product_documentation_override: https://cloud.google.com/security-command-center/docs/compliance-manager-overview + default_version: v1 + - name: google-cloud-commerce-consumer-procurement + version: 0.5.0 + apis: + - path: google/cloud/commerce/consumer/procurement/v1 + - path: google/cloud/commerce/consumer/procurement/v1alpha1 + description_override: Cloud Commerce Consumer Procurement 
API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/commerce/consumer/procurement/v1: + - python-gapic-name=commerce_consumer_procurement + - python-gapic-namespace=google.cloud + google/cloud/commerce/consumer/procurement/v1alpha1: + - python-gapic-name=commerce_consumer_procurement + - python-gapic-namespace=google.cloud + name_pretty_override: Cloud Commerce Consumer Procurement API + product_documentation_override: https://cloud.google.com/marketplace/docs/ + api_shortname_override: procurement + metadata_name_override: procurement + default_version: v1 + - name: google-cloud-common + version: 1.9.0 + apis: + - path: google/cloud/common + description_override: This package contains generated Python types for google.cloud.common + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/common/test_common.py + python: + library_type: CORE + name_pretty_override: Google Cloud Common + product_documentation_override: https://cloud.google.com + metadata_name_override: common + default_version: apiVersion + - name: google-cloud-compute + version: 1.47.0 + apis: + - path: google/cloud/compute/v1 + description_override: delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/base.py + - tests/system/test_addresses.py + - tests/system/test_instance_group.py + - tests/system/test_pagination.py + - tests/system/test_smoke.py + python: + opt_args_by_api: + google/cloud/compute/v1: + - transport=rest + name_pretty_override: Compute Engine + metadata_name_override: compute + default_version: v1 + - name: google-cloud-compute-v1beta + version: 0.10.0 + apis: + - path: google/cloud/compute/v1beta + description_override: delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/compute/v1beta: + - transport=rest + - warehouse-package-name=google-cloud-compute-v1beta + name_pretty_override: Compute Engine + product_documentation_override: https://cloud.google.com/compute/ + issue_tracker_override: https://issuetracker.google.com/issues/new?component=187134&template=0 + default_version: v1beta + - name: google-cloud-confidentialcomputing + version: 0.9.0 + apis: + - path: google/cloud/confidentialcomputing/v1 + description_override: Protect data in-use with Confidential VMs, Confidential GKE, Confidential Dataproc, and Confidential Space. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Confidential Computing API + issue_tracker_override: https://issuetracker.google.com/issues/new?component=1166820 + metadata_name_override: confidentialcomputing + default_version: v1 + - name: google-cloud-config + version: 0.5.0 + apis: + - path: google/cloud/config/v1 + description_override: Infrastructure Manager API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Infrastructure Manager API + product_documentation_override: https://cloud.google.com/infrastructure-manager/docs/overview + api_shortname_override: config + metadata_name_override: config + default_version: v1 + - name: google-cloud-configdelivery + version: 0.4.0 + apis: + - path: google/cloud/configdelivery/v1 + - path: google/cloud/configdelivery/v1beta + - path: google/cloud/configdelivery/v1alpha + description_override: ConfigDelivery service manages the deployment of kubernetes configuration to a fleet of kubernetes clusters. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Config Delivery API + product_documentation_override: https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/reference/rest + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1alpha + - name: google-cloud-contact-center-insights + version: 1.26.0 + apis: + - path: google/cloud/contactcenterinsights/v1 + description_override: ' helps users detect and visualize patterns in their contact center data.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/contactcenterinsights/v1: + - warehouse-package-name=google-cloud-contact-center-insights + - python-gapic-namespace=google.cloud + - python-gapic-name=contact_center_insights + metadata_name_override: contactcenterinsights + default_version: v1 + - name: google-cloud-container + version: 2.64.0 + apis: + - path: google/container/v1 + - path: google/container/v1beta1 + description_override: The Google Kubernetes Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/container/v1: + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-container + - transport=grpc+rest + google/container/v1beta1: + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-container + - transport=grpc + metadata_name_override: container + default_version: v1 + - name: google-cloud-containeranalysis + version: 2.21.0 + apis: + - path: google/devtools/containeranalysis/v1 + description_override: is a service that provides vulnerability scanning and metadata storage for software artifacts. The service performs vulnerability scans on built software artifacts, such as the images in Container Registry, then stores the resulting metadata and makes it available for consumption through an API. The metadata may come from several sources, including vulnerability scanning, other Cloud services, and third-party providers. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/test_get_grafeas_client.py + python: + opt_args_by_api: + google/devtools/containeranalysis/v1: + - python-gapic-namespace=google.cloud.devtools + - warehouse-package-name=google-cloud-containeranalysis + product_documentation_override: https://cloud.google.com/container-registry/docs/container-analysis + metadata_name_override: containeranalysis + default_version: v1 + - name: google-cloud-contentwarehouse + version: 0.10.0 + apis: + - path: google/cloud/contentwarehouse/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/contentwarehouse/v1: + - proto-plus-deps=google.cloud.documentai.v1 + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: contentwarehouse + default_version: v1 + - name: google-cloud-core + version: 2.5.1 + python: + library_type: CORE + name_pretty_override: Google API client core library + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + - name: google-cloud-data-fusion + version: 1.16.0 + apis: + - path: google/cloud/datafusion/v1 + description_override: is a fully managed, cloud-native, enterprise data integration service for quickly building and managing data pipelines. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/datafusion/v1: + - warehouse-package-name=google-cloud-data-fusion + - python-gapic-namespace=google.cloud + - python-gapic-name=data_fusion + metadata_name_override: datafusion + default_version: v1 + - name: google-cloud-data-qna + version: 0.13.0 + apis: + - path: google/cloud/dataqna/v1alpha + description_override: Data QnA is a natural language question and answer service for BigQuery data. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/dataqna/v1alpha: + - warehouse-package-name=google-cloud-data-qna + product_documentation_override: https://cloud.google.com/bigquery/docs/dataqna + metadata_name_override: dataqna + default_version: v1alpha + - name: google-cloud-databasecenter + version: 0.7.0 + apis: + - path: google/cloud/databasecenter/v1beta + description_override: |- + Database Center offers a comprehensive, organization-wide platform for + monitoring database fleet health across various products. It simplifies + management and reduces risk by automatically aggregating and summarizing + key health signals, removing the need for custom dashboards. The platform + provides a unified view through its dashboard and API, enabling teams + focused on reliability, compliance, security, cost, and administration to + quickly identify and address relevant issues within their database fleets. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Database Center API + product_documentation_override: https://cloud.google.com/database-center/docs/overview + default_version: v1beta + - name: google-cloud-datacatalog + version: 3.30.0 + apis: + - path: google/cloud/datacatalog/v1 + - path: google/cloud/datacatalog/v1beta1 + description_override: is a fully managed and highly scalable data discovery and metadata management service. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/data-catalog + metadata_name_override: datacatalog + default_version: v1 + - name: google-cloud-datacatalog-lineage + version: 0.6.0 + apis: + - path: google/cloud/datacatalog/lineage/v1 + description_override: 'Data lineage is a Dataplex feature that lets you track how data moves through your systems: where it comes from, where it is passed to, and what transformations are applied to it.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/datacatalog/lineage/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=datacatalog_lineage + name_pretty_override: Data Lineage API + product_documentation_override: https://cloud.google.com/data-catalog/docs/concepts/about-data-lineage + api_shortname_override: lineage + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: lineage + default_version: v1 + - name: google-cloud-datacatalog-lineage-configmanagement + version: 0.1.0 + apis: + - path: google/cloud/datacatalog/lineage/configmanagement/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/datacatalog/lineage/configmanagement/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=datacatalog_lineage_configmanagement + - warehouse-package-name=google-cloud-datacatalog-lineage-configmanagement + name_pretty_override: Data Lineage API + product_documentation_override: https://cloud.google.com/dataplex/docs/about-data-lineage + default_version: v1 + - name: google-cloud-dataflow-client + version: 0.13.0 + apis: + - path: google/dataflow/v1beta3 + description_override: Unified stream and batch data processing that's serverless, fast, and cost-effective. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/dataflow/v1beta3: + - python-gapic-name=dataflow + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-dataflow-client + metadata_name_override: dataflow + default_version: v1beta3 + - name: google-cloud-dataform + version: 0.10.0 + apis: + - path: google/cloud/dataform/v1 + - path: google/cloud/dataform/v1beta1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Dataform + product_documentation_override: https://cloud.google.com + metadata_name_override: dataform + default_version: v1beta1 + - name: google-cloud-datalabeling + version: 1.16.0 + apis: + - path: google/cloud/datalabeling/v1beta1 + description_override: is a service that lets you work with human labelers to generate highly accurate labels for a collection of data that you can use to train your machine learning models. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud Data Labeling + product_documentation_override: https://cloud.google.com/data-labeling/docs/ + metadata_name_override: datalabeling + default_version: v1beta1 + - name: google-cloud-dataplex + version: 2.18.0 + apis: + - path: google/cloud/dataplex/v1 + description_override: provides intelligent data fabric that enables organizations to centrally manage, monitor, and govern their data across data lakes, data warehouses, and data marts with consistent controls, providing access to trusted data and powering analytics at scale. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/dataplex + metadata_name_override: dataplex + default_version: v1 + - name: google-cloud-dataproc + version: 5.26.0 + apis: + - path: google/cloud/dataproc/v1 + description_override: is a faster, easier, more cost-effective way to run Apache Spark and Apache Hadoop. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + name_pretty_override: Google Cloud Dataproc + metadata_name_override: dataproc + default_version: v1 + - name: google-cloud-dataproc-metastore + version: 1.22.0 + apis: + - path: google/cloud/metastore/v1 + - path: google/cloud/metastore/v1beta + - path: google/cloud/metastore/v1alpha + description_override: is a fully managed, highly available, autoscaled, autohealing, OSS-native metastore service that greatly simplifies technical metadata management. Dataproc Metastore service is based on Apache Hive metastore and serves as a critical component towards enterprise data lakes. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/metastore/v1: + - warehouse-package-name=google-cloud-dataproc-metastore + google/cloud/metastore/v1alpha: + - warehouse-package-name=google-cloud-dataproc-metastore + google/cloud/metastore/v1beta: + - warehouse-package-name=google-cloud-dataproc-metastore + metadata_name_override: metastore + default_version: v1 + - name: google-cloud-datastore + version: 2.24.0 + apis: + - path: google/datastore/v1 + - path: google/datastore/admin/v1 + description_override: |- + is a fully managed, schemaless database for + storing non-relational data. Cloud Datastore automatically scales with + your users and supports ACID transactions, high availability of reads and + writes, strong consistency for reads and ancestor queries, and eventual + consistency for all other queries. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/datastore/admin/v1: + - python-gapic-name=datastore_admin + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-datastore + google/datastore/v1: + - python-gapic-namespace=google.cloud + name_pretty_override: Google Cloud Datastore API + product_documentation_override: https://cloud.google.com/datastore + issue_tracker_override: https://issuetracker.google.com/savedsearches/559768 + metadata_name_override: datastore + default_version: v1 + - name: google-cloud-datastream + version: 1.18.0 + apis: + - path: google/cloud/datastream/v1 + - path: google/cloud/datastream/v1alpha1 + description_override: is a serverless and easy-to-use change data capture (CDC) and replication service. It allows you to synchronize data across heterogeneous databases and applications reliably, and with minimal latency and downtime. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: datastream + default_version: v1 + - name: google-cloud-deploy + version: 2.10.0 + apis: + - path: google/cloud/deploy/v1 + description_override: is a service that automates delivery of your applications to a series of target environments in a defined sequence + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud Deploy + metadata_name_override: clouddeploy + default_version: v1 + - name: google-cloud-developerconnect + version: 0.5.0 + apis: + - path: google/cloud/developerconnect/v1 + description_override: Developer Connect streamlines integration with third-party source code management platforms by simplifying authentication, authorization, and networking configuration. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Developer Connect API + product_documentation_override: https://cloud.google.com/developer-connect/docs/overview + default_version: v1 + - name: google-cloud-devicestreaming + version: 0.4.0 + apis: + - path: google/cloud/devicestreaming/v1 + description_override: The Cloud API for device streaming usage. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Device Streaming API + default_version: v1 + - name: google-cloud-dialogflow + version: 2.47.0 + apis: + - path: google/cloud/dialogflow/v2 + - path: google/cloud/dialogflow/v2beta1 + description_override: is an end-to-end, build-once deploy-everywhere development suite for creating conversational interfaces for websites, mobile applications, popular messaging platforms, and IoT devices. You can use it to build interfaces (such as chatbots and conversational IVR) that enable natural and rich interactions between your users and your business. Dialogflow Enterprise Edition users have access to Google Cloud Support and a service level agreement (SLA) for production deployments. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://www.dialogflow.com/ + issue_tracker_override: https://issuetracker.google.com/savedsearches/5300385 + metadata_name_override: dialogflow + default_version: v2 + - name: google-cloud-dialogflow-cx + version: 2.5.0 + apis: + - path: google/cloud/dialogflow/cx/v3 + - path: google/cloud/dialogflow/cx/v3beta1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/dialogflow/cx/v3: + - python-gapic-name=dialogflowcx + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-dialogflow-cx + google/cloud/dialogflow/cx/v3beta1: + - python-gapic-name=dialogflowcx + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-dialogflow-cx + name_pretty_override: Dialogflow CX + metadata_name_override: dialogflow-cx + default_version: v3 + - name: google-cloud-discoveryengine + version: 0.18.0 + apis: + - path: google/cloud/discoveryengine/v1 + - path: google/cloud/discoveryengine/v1beta + - path: google/cloud/discoveryengine/v1alpha + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Discovery Engine API + product_documentation_override: https://cloud.google.com/discovery-engine/ + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: discoveryengine + default_version: v1beta + - name: google-cloud-dlp + version: 3.35.0 + apis: + - path: google/privacy/dlp/v2 + description_override: provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams, like text blocks and images. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/privacy/dlp/v2: + - python-gapic-namespace=google.cloud + name_pretty_override: Cloud Data Loss Prevention + product_documentation_override: https://cloud.google.com/dlp/docs/ + metadata_name_override: dlp + default_version: v2 + - name: google-cloud-dms + version: 1.15.0 + apis: + - path: google/cloud/clouddms/v1 + description_override: makes it easier for you to migrate your data to Google Cloud. This service helps you lift and shift your MySQL and PostgreSQL workloads into Cloud SQL. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/clouddms/v1: + - warehouse-package-name=google-cloud-dms + name_pretty_override: Cloud Database Migration Service + metadata_name_override: datamigration + default_version: v1 + - name: google-cloud-dns + version: 0.36.1 + description_override: provides methods that you can use to manage DNS for your applications. + python: + library_type: REST + name_pretty_override: Cloud DNS + product_documentation_override: https://cloud.google.com/dns + api_shortname_override: dns + issue_tracker_override: https://issuetracker.google.com/savedsearches/559772 + metadata_name_override: dns + - name: google-cloud-documentai + version: 3.14.0 + apis: + - path: google/cloud/documentai/v1 + - path: google/cloud/documentai/v1beta3 + description_override: Service to parse structured information from unstructured or semi-structured documents using state-of-the-art Google AI such as natural language, computer vision, translation, and AutoML. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/documentai/v1: + - autogen-snippets + google/cloud/documentai/v1beta3: + - autogen-snippets + name_pretty_override: Document AI + metadata_name_override: documentai + default_version: v1 + - name: google-cloud-documentai-toolbox + version: 0.15.2 + python: + library_type: OTHER + name_pretty_override: Document AI Toolbox + issue_tracker_override: https://github.com/googleapis/python-documentai-toolbox/issues + metadata_name_override: documentai-toolbox + default_version: v1 + - name: google-cloud-domains + version: 1.13.0 + apis: + - path: google/cloud/domains/v1 + - path: google/cloud/domains/v1beta1 + description_override: allows you to register and manage domains by using Cloud Domains. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: domains + default_version: v1 + - name: google-cloud-edgecontainer + version: 0.8.0 + apis: + - path: google/cloud/edgecontainer/v1 + description_override: Google Distributed Cloud Edge allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: edgecontainer + default_version: v1 + - name: google-cloud-edgenetwork + version: 0.5.0 + apis: + - path: google/cloud/edgenetwork/v1 + description_override: Network management API for Distributed Cloud Edge + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Distributed Cloud Edge Network API + product_documentation_override: https://cloud.google.com/distributed-cloud/edge/latest/docs/overview + default_version: v1 + - name: google-cloud-enterpriseknowledgegraph + version: 0.6.0 + apis: + - path: google/cloud/enterpriseknowledgegraph/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: enterpriseknowledgegraph + default_version: v1 + - name: google-cloud-error-reporting + version: 1.15.0 + apis: + - path: google/devtools/clouderrorreporting/v1beta1 + description_override: 'counts, analyzes and aggregates the crashes in your running cloud services. A centralized error management interface displays the results with sorting and filtering capabilities. A dedicated view shows the error details: time chart, occurrences, affected user count, first and last seen dates and a cleaned exception stack trace. Opt-in to receive email and mobile alerts on new errors.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/devtools/clouderrorreporting/v1beta1: + - python-gapic-name=errorreporting + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-error-reporting + name_pretty_override: Error Reporting API + metadata_name_override: clouderrorreporting + default_version: v1beta1 + - name: google-cloud-essential-contacts + version: 1.13.0 + apis: + - path: google/cloud/essentialcontacts/v1 + description_override: helps you customize who receives notifications by providing your own list of contacts in many Google Cloud services. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/essentialcontacts/v1: + - warehouse-package-name=google-cloud-essential-contacts + - python-gapic-namespace=google.cloud + - python-gapic-name=essential_contacts + product_documentation_override: https://cloud.google.com/resource-manager/docs/managing-notification-contacts/ + metadata_name_override: essentialcontacts + default_version: v1 + - name: google-cloud-eventarc + version: 1.20.0 + apis: + - path: google/cloud/eventarc/v1 + description_override: lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes. Eventarc requires no infrastructure management, you can optimize productivity and costs while building a modern, event-driven solution. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: eventarc + default_version: v1 + - name: google-cloud-eventarc-publishing + version: 0.10.0 + apis: + - path: google/cloud/eventarc/publishing/v1 + description_override: lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/eventarc/publishing/v1: + - python-gapic-name=eventarc_publishing + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-eventarc-publishing + metadata_name_override: eventarcpublishing + default_version: v1 + - name: google-cloud-filestore + version: 1.16.0 + apis: + - path: google/cloud/filestore/v1 + description_override: Filestore instances are fully managed NFS file servers on Google Cloud for use with applications running on Compute Engine virtual machines (VMs) instances or Google Kubernetes Engine clusters. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/filestore/v1: + - proto-plus-deps=google.cloud.common + name_pretty_override: Filestore + metadata_name_override: file + default_version: v1 + - name: google-cloud-financialservices + version: 0.4.0 + apis: + - path: google/cloud/financialservices/v1 + description_override: Google Cloud's Anti Money Laundering AI (AML AI) product is an API that scores AML risk. Use it to identify more risk, more defensibly, with fewer false positives and reduced time per review. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Anti Money Laundering AI API + product_documentation_override: https://cloud.google.com/financial-services/anti-money-laundering/docs/concepts/overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-firestore + version: 2.26.0 + apis: + - path: google/firestore/v1 + - path: google/firestore/admin/v1 + - path: google/firestore/bundle + description_override: is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. 
Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/firestore_admin_v1/admin_client.rst + - docs/firestore_v1/aggregation.rst + - docs/firestore_v1/batch.rst + - docs/firestore_v1/bulk_writer.rst + - docs/firestore_v1/client.rst + - docs/firestore_v1/collection.rst + - docs/firestore_v1/document.rst + - docs/firestore_v1/field_path.rst + - docs/firestore_v1/query.rst + - docs/firestore_v1/transaction.rst + - docs/firestore_v1/transforms.rst + - docs/firestore_v1/types.rst + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/firestore/admin/v1: + - python-gapic-name=firestore_admin + - python-gapic-namespace=google.cloud + - transport=grpc+rest + google/firestore/bundle: + - python-gapic-namespace=google.cloud + - python-gapic-name=firestore_bundle + - transport=grpc + google/firestore/v1: + - python-gapic-namespace=google.cloud + - transport=grpc+rest + name_pretty_override: Cloud Firestore API + product_documentation_override: https://cloud.google.com/firestore + issue_tracker_override: https://issuetracker.google.com/savedsearches/5337669 + metadata_name_override: firestore + default_version: v1 + - name: google-cloud-functions + version: 1.23.0 + apis: + - path: google/cloud/functions/v2 + - path: google/cloud/functions/v1 + description_override: is a scalable pay as you go Functions-as-a-Service (FaaS) to run your code with zero server management. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: cloudfunctions + default_version: v1 + - name: google-cloud-gdchardwaremanagement + version: 0.5.0 + apis: + - path: google/cloud/gdchardwaremanagement/v1alpha + description_override: Google Distributed Cloud connected allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: GDC Hardware Management API + default_version: v1alpha + - name: google-cloud-geminidataanalytics + version: 0.12.0 + apis: + - path: google/cloud/geminidataanalytics/v1beta + - path: google/cloud/geminidataanalytics/v1alpha + description_override: Developers can use the Conversational Analytics API, accessed through geminidataanalytics.googleapis.com, to build an artificial intelligence (AI)-powered chat interface, or data agent, that answers questions about structured data in BigQuery, Looker, and Looker Studio using natural language. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/gemini/docs/conversational-analytics-api/overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1alpha + - name: google-cloud-gke-backup + version: 0.8.0 + apis: + - path: google/cloud/gkebackup/v1 + description_override: An API for backing up and restoring workloads in GKE. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/gkebackup/v1: + - python-gapic-name=gke_backup + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-gke-backup + product_documentation_override: https://cloud.google.com/kubernetes-engine/docs/add-on/backup-for-gke/concepts/backup-for-gke + metadata_name_override: gkebackup + default_version: v1 + - name: google-cloud-gke-connect-gateway + version: 0.13.0 + apis: + - path: google/cloud/gkeconnect/gateway/v1 + - path: google/cloud/gkeconnect/gateway/v1beta1 + description_override: builds on the power of fleets to let Anthos users connect to and run commands against registered Anthos clusters in a simple, consistent, and secured way, whether the clusters are on Google Cloud, other public clouds, or on premises, and makes it easier to automate DevOps processes across all your clusters. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/gkeconnect/gateway/v1: + - warehouse-package-name=google-cloud-gke-connect-gateway + google/cloud/gkeconnect/gateway/v1beta1: + - warehouse-package-name=google-cloud-gke-connect-gateway + name_pretty_override: GKE Connect Gateway + product_documentation_override: https://cloud.google.com/anthos/multicluster-management/gateway + metadata_name_override: connectgateway + default_version: v1 + - name: google-cloud-gke-hub + version: 1.23.0 + apis: + - path: google/cloud/gkehub/v1 + - path: google/cloud/gkehub/v1beta1 + description_override: provides a unified way to work with Kubernetes clusters as part of Anthos, extending GKE to work in multiple environments. You have consistent, unified, and secure infrastructure, cluster, and container management, whether you're using Anthos on Google Cloud (with traditional GKE), hybrid cloud, or multiple public clouds. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/gkehub_v1/configmanagement_v1 + - docs/gkehub_v1/configmanagement_v1/services_.rst + - docs/gkehub_v1/configmanagement_v1/types_.rst + - docs/gkehub_v1/multiclusteringress_v1 + - docs/gkehub_v1/multiclusteringress_v1/services_.rst + - docs/gkehub_v1/multiclusteringress_v1/types_.rst + - docs/gkehub_v1/rbacrolebindingactuation_v1 + - docs/gkehub_v1/rbacrolebindingactuation_v1/services_.rst + - docs/gkehub_v1/rbacrolebindingactuation_v1/types_.rst + - google/cloud/gkehub_v1/configmanagement_v1 + - google/cloud/gkehub_v1/configmanagement_v1/__init__.py + - google/cloud/gkehub_v1/configmanagement_v1/gapic_metadata.json + - google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py + - google/cloud/gkehub_v1/configmanagement_v1/py.typed + - google/cloud/gkehub_v1/configmanagement_v1/services + - google/cloud/gkehub_v1/configmanagement_v1/services/__init__.py + - google/cloud/gkehub_v1/configmanagement_v1/types + - google/cloud/gkehub_v1/configmanagement_v1/types/__init__.py + - google/cloud/gkehub_v1/configmanagement_v1/types/configmanagement.py + - google/cloud/gkehub_v1/multiclusteringress_v1 + - google/cloud/gkehub_v1/multiclusteringress_v1/__init__.py + - google/cloud/gkehub_v1/multiclusteringress_v1/gapic_metadata.json + - google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py + - google/cloud/gkehub_v1/multiclusteringress_v1/py.typed + - google/cloud/gkehub_v1/multiclusteringress_v1/services + - google/cloud/gkehub_v1/multiclusteringress_v1/services/__init__.py + - google/cloud/gkehub_v1/multiclusteringress_v1/types + - google/cloud/gkehub_v1/multiclusteringress_v1/types/__init__.py + - google/cloud/gkehub_v1/multiclusteringress_v1/types/multiclusteringress.py + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1 + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/__init__.py + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/gapic_metadata.json + - 
google/cloud/gkehub_v1/rbacrolebindingactuation_v1/gapic_version.py + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/py.typed + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/services + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/services/__init__.py + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/types + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/types/__init__.py + - google/cloud/gkehub_v1/rbacrolebindingactuation_v1/types/rbacrolebindingactuation.py + python: + opt_args_by_api: + google/cloud/gkehub/v1: + - warehouse-package-name=google-cloud-gke-hub + google/cloud/gkehub/v1beta1: + - warehouse-package-name=google-cloud-gke-hub + product_documentation_override: https://cloud.google.com/anthos/gke/docs/ + metadata_name_override: gkehub + default_version: v1 + - name: google-cloud-gke-multicloud + version: 0.9.0 + apis: + - path: google/cloud/gkemulticloud/v1 + description_override: An API for provisioning and managing GKE clusters running on AWS and Azure infrastructure through a centralized Google Cloud backed control plane. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/gkemulticloud/v1: + - python-gapic-name=gke_multicloud + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-gke-multicloud + name_pretty_override: Anthos Multicloud + product_documentation_override: https://cloud.google.com/anthos/clusters/docs/multi-cloud + metadata_name_override: gkemulticloud + default_version: v1 + - name: google-cloud-gkerecommender + version: 0.3.0 + apis: + - path: google/cloud/gkerecommender/v1 + description_override: GKE Recommender API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: GKE Recommender API + product_documentation_override: https://cloud.google.com/kubernetes-engine/docs/how-to/machine-learning/inference-quickstart + default_version: v1 + - name: google-cloud-gsuiteaddons + version: 0.5.0 + apis: + - path: google/cloud/gsuiteaddons/v1 + description_override: Add-ons are customized applications that integrate with Google Workspace applications. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/gsuiteaddons/v1: + - proto-plus-deps=google.apps.script.type.calendar+google.apps.script.type.docs+google.apps.script.type.drive+google.apps.script.type.gmail+google.apps.script.type.sheets+google.apps.script.type.slides+google.apps.script.type + name_pretty_override: Google Workspace Add-ons API + metadata_name_override: gsuiteaddons + default_version: v1 + - name: google-cloud-hypercomputecluster + version: 0.4.0 + apis: + - path: google/cloud/hypercomputecluster/v1 + - path: google/cloud/hypercomputecluster/v1beta + description_override: The Cluster Director API allows you to deploy, manage, and monitor clusters that run AI, ML, or HPC workloads. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cluster Director API + product_documentation_override: https://cloud.google.com/blog/products/compute/managed-slurm-and-other-cluster-director-enhancements + default_version: v1 + - name: google-cloud-iam + version: 2.22.0 + apis: + - path: google/iam/v3 + - path: google/iam/v2 + - path: google/iam/admin/v1 + - path: google/iam/credentials/v1 + - path: google/iam/v3beta + - path: google/iam/v2beta + description_override: Manages identity and access control for Google Cloud Platform resources, including the creation of service accounts, which you can use to authenticate to Google and make API calls. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/iam/admin/v1: + - python-gapic-name=iam_admin + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-iam + - transport=grpc + google/iam/credentials/v1: + - warehouse-package-name=google-cloud-iam + - python-gapic-namespace=google.cloud + - python-gapic-name=iam_credentials + - transport=grpc+rest + google/iam/v2: + - python-gapic-name=iam + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-iam + - transport=grpc+rest + google/iam/v2beta: + - python-gapic-name=iam + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-iam + - transport=grpc + google/iam/v3: + - python-gapic-name=iam + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-iam + - transport=grpc+rest + google/iam/v3beta: + - python-gapic-name=iam + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-iam + - transport=grpc+rest + name_pretty_override: Cloud Identity and Access Management + product_documentation_override: https://cloud.google.com/iam/docs/ + api_shortname_override: iamcredentials + issue_tracker_override: https://issuetracker.google.com/savedsearches/559761 + metadata_name_override: iam + 
default_version: v2 + - name: google-cloud-iam-logging + version: 1.7.0 + apis: + - path: google/iam/v1/logging + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/iam_logging_v1/test_iam_logging.py + python: + library_type: OTHER + opt_args_by_api: + google/iam/v1/logging: + - warehouse-package-name=google-cloud-iam-logging + - python-gapic-namespace=google.cloud + - python-gapic-name=iam_logging + name_pretty_override: IAM Logging Protos + product_documentation_override: https://cloud.google.com/iam/docs/audit-logging + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: iamlogging + default_version: v1 + - name: google-cloud-iap + version: 1.21.0 + apis: + - path: google/cloud/iap/v1 + description_override: Identity-Aware Proxy includes a number of features that can be used to protect access to Google Cloud hosted resources and applications hosted on Google Cloud. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Identity-Aware Proxy + metadata_name_override: iap + default_version: v1 + - name: google-cloud-ids + version: 1.13.0 + apis: + - path: google/cloud/ids/v1 + description_override: Cloud IDS is an intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks on your network. Cloud IDS works by creating a Google-managed peered network with mirrored VMs. Traffic in the peered network is mirrored, and then inspected by Palo Alto Networks threat protection technologies to provide advanced threat detection. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: ids + default_version: v1 + - name: google-cloud-kms + version: 3.12.0 + apis: + - path: google/cloud/kms/v1 + description_override: a cloud-hosted key management service that lets you manage cryptographic keys for your cloud services the same way you do on-premises. 
You can generate, use, rotate, and destroy AES256, RSA 2048, RSA 3072, RSA 4096, EC P256, and EC P384 cryptographic keys. Cloud KMS is integrated with Cloud IAM and Cloud Audit Logging so that you can manage permissions on individual keys and monitor how these are used. Use Cloud KMS to protect secrets and other sensitive data that you need to store in Google Cloud Platform. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + name_pretty_override: Google Cloud Key Management Service + metadata_name_override: cloudkms + default_version: v1 + - name: google-cloud-kms-inventory + version: 0.6.0 + apis: + - path: google/cloud/kms/inventory/v1 + description_override: KMS Inventory API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/kms/inventory/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=kms_inventory + - proto-plus-deps=google.cloud.kms.v1 + name_pretty_override: KMS Inventory API + product_documentation_override: https://cloud.google.com/kms/docs/ + api_shortname_override: inventory + api_id_override: inventory.googleapis.com + metadata_name_override: inventory + default_version: v1 + - name: google-cloud-language + version: 2.20.0 + apis: + - path: google/cloud/language/v2 + - path: google/cloud/language/v1 + - path: google/cloud/language/v1beta2 + description_override: provides natural language understanding technologies to developers, including sentiment analysis, entity analysis, entity sentiment analysis, content classification, and syntax analysis. This API is part of the larger Cloud Machine Learning API family. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - samples/README.txt + python: + name_pretty_override: Natural Language + product_documentation_override: https://cloud.google.com/natural-language/docs/ + metadata_name_override: language + default_version: v1 + - name: google-cloud-licensemanager + version: 0.4.0 + apis: + - path: google/cloud/licensemanager/v1 + description_override: 'License Manager is a tool to manage and track third-party licenses on Google Cloud. ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: License Manager API + product_documentation_override: https://cloud.google.com/compute/docs/instances/windows/ms-licensing + default_version: v1 + - name: google-cloud-life-sciences + version: 0.12.0 + apis: + - path: google/cloud/lifesciences/v2beta + description_override: is a suite of services and tools for managing, processing, and transforming life sciences data. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/lifesciences/v2beta: + - warehouse-package-name=google-cloud-life-sciences + metadata_name_override: lifesciences + default_version: v2beta + - name: google-cloud-locationfinder + version: 0.4.0 + apis: + - path: google/cloud/locationfinder/v1 + description_override: Cloud Location Finder lets you identify and filter cloud locations in regions and zones across Google Cloud, Google Distributed Cloud, Microsoft Azure, Amazon Web Services, and Oracle Cloud Infrastructure based on proximity, geographic location, and carbon footprint. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Location Finder API + product_documentation_override: https://issuetracker.google.com/issues/new?component=1569265&template=1988535 + api_id_override: locationfinder.googleapis.com + default_version: v1 + - name: google-cloud-logging + version: 3.15.0 + apis: + - path: google/logging/v2 + description_override: Writes log entries and manages your Cloud Logging configuration. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/logging/v2: + - python-gapic-name=logging + - python-gapic-namespace=google.cloud + name_pretty_override: Cloud Logging API + product_documentation_override: https://cloud.google.com/logging/docs + issue_tracker_override: https://issuetracker.google.com/savedsearches/559764 + metadata_name_override: logging + default_version: v2 + - name: google-cloud-lustre + version: 0.4.0 + apis: + - path: google/cloud/lustre/v1 + description_override: 'null ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud Managed Lustre API + default_version: v1 + - name: google-cloud-maintenance-api + version: 0.4.0 + apis: + - path: google/cloud/maintenance/api/v1 + - path: google/cloud/maintenance/api/v1beta + description_override: The Maintenance API provides a centralized view of planned disruptive maintenance events across supported Google Cloud products. It offers users visibility into upcoming, ongoing, and completed maintenance, along with controls to manage certain maintenance activities, such as maintenance windows, rescheduling, and on-demand updates. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/maintenance/api/v1: + - python-gapic-name=maintenance_api + - python-gapic-namespace=google.cloud + google/cloud/maintenance/api/v1beta: + - python-gapic-name=maintenance_api + - python-gapic-namespace=google.cloud + name_pretty_override: Maintenance API + product_documentation_override: https://cloud.google.com/unified-maintenance/docs/overview + api_shortname_override: api + api_id_override: api.googleapis.com + default_version: v1 + - name: google-cloud-managed-identities + version: 1.15.0 + apis: + - path: google/cloud/managedidentities/v1 + description_override: is a highly available, hardened Google Cloud service running actual Microsoft AD that enables you to manage authentication and authorization for your AD-dependent workloads, automate AD server maintenance and security configuration, and connect your on-premises AD domain to the cloud. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/managedidentities/v1: + - warehouse-package-name=google-cloud-managed-identities + metadata_name_override: managedidentities + default_version: v1 + - name: google-cloud-managedkafka + version: 0.4.0 + apis: + - path: google/cloud/managedkafka/v1 + description_override: Managed Service for Apache Kafka API is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/managed-kafka + default_version: v1 + - name: google-cloud-managedkafka-schemaregistry + version: 0.4.0 + apis: + - path: google/cloud/managedkafka/schemaregistry/v1 + description_override: 'Manage Apache Kafka clusters and resources. 
' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/managedkafka/schemaregistry/v1: + - python-gapic-name=managedkafka_schemaregistry + - python-gapic-namespace=google.cloud + name_pretty_override: Managed Service for Apache Kafka API + api_shortname_override: schemaregistry + api_id_override: schemaregistry.googleapis.com + default_version: v1 + - name: google-cloud-media-translation + version: 0.14.0 + apis: + - path: google/cloud/mediatranslation/v1beta1 + description_override: provides enterprise quality translation from/to various media types. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/mediatranslation/v1beta1: + - warehouse-package-name=google-cloud-media-translation + metadata_name_override: mediatranslation + default_version: v1beta1 + - name: google-cloud-memcache + version: 1.15.0 + apis: + - path: google/cloud/memcache/v1 + - path: google/cloud/memcache/v1beta2 + description_override: is a fully-managed in-memory data store service for Memcache. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/memorystore/docs/memcached/ + metadata_name_override: memcache + default_version: v1 + - name: google-cloud-memorystore + version: 0.4.0 + apis: + - path: google/cloud/memorystore/v1 + - path: google/cloud/memorystore/v1beta + description_override: Memorystore for Valkey is a fully managed Valkey Cluster service for Google Cloud. Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Valkey service without the burden of managing complex Valkey deployments. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/memorystore/docs/valkey + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-migrationcenter + version: 0.4.0 + apis: + - path: google/cloud/migrationcenter/v1 + description_override: A unified platform that helps you accelerate your end-to-end cloud journey from your current on-premises or cloud environments to Google Cloud. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Migration Center API + product_documentation_override: https://cloud.google.com/migration-center/docs/migration-center-overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: migrationcenter + default_version: v1 + - name: google-cloud-modelarmor + version: 0.5.0 + apis: + - path: google/cloud/modelarmor/v1 + - path: google/cloud/modelarmor/v1beta + description_override: Model Armor helps you protect against risks like prompt injection, harmful content, and data leakage in generative AI applications by letting you define policies that filter user prompts and model responses. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Model Armor API + product_documentation_override: https://cloud.google.com/security-command-center/docs/model-armor-overview + api_shortname_override: securitycenter + api_id_override: securitycenter.googleapis.com + default_version: v1 + - name: google-cloud-monitoring + version: 2.30.0 + apis: + - path: google/monitoring/v3 + description_override: collects metrics, events, and metadata from Google Cloud, Amazon Web Services (AWS), hosted uptime probes, and application instrumentation. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premise systems, and hybrid cloud systems. 
Stackdriver ingests that data and generates insights via dashboards, charts, and alerts. BindPlane is included with your Google Cloud project at no additional cost. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/query.rst + - google/cloud/monitoring_v3/_dataframe.py + - google/cloud/monitoring_v3/query.py + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + - tests/unit/test__dataframe.py + - tests/unit/test_query.py + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/monitoring/v3: + - python-gapic-namespace=google.cloud + name_pretty_override: Stackdriver Monitoring + metadata_name_override: monitoring + default_version: v3 + - name: google-cloud-monitoring-dashboards + version: 2.21.0 + apis: + - path: google/monitoring/dashboard/v1 + description_override: are one way for you to view and analyze metric data. The Cloud Console provides predefined dashboards that require no setup or configuration. You can also define custom dashboards. With custom dashboards, you have complete control over the charts that are displayed and their configuration. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - google/monitoring + - google/monitoring/dashboard + - google/monitoring/dashboard/__init__.py + - google/monitoring/dashboard/py.typed + - google/monitoring/dashboard_v1 + - google/monitoring/dashboard_v1/__init__.py + - google/monitoring/dashboard_v1/gapic_version.py + - google/monitoring/dashboard_v1/py.typed + - google/monitoring/dashboard_v1/services + - google/monitoring/dashboard_v1/services/__init__.py + - google/monitoring/dashboard_v1/services/dashboards_service + - google/monitoring/dashboard_v1/services/dashboards_service/__init__.py + - google/monitoring/dashboard_v1/services/dashboards_service/async_client.py + - google/monitoring/dashboard_v1/services/dashboards_service/client.py + - google/monitoring/dashboard_v1/services/dashboards_service/pagers.py + - google/monitoring/dashboard_v1/services/dashboards_service/transports + - google/monitoring/dashboard_v1/services/dashboards_service/transports/__init__.py + - google/monitoring/dashboard_v1/services/dashboards_service/transports/base.py + - google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc.py + - google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py + - google/monitoring/dashboard_v1/types + - google/monitoring/dashboard_v1/types/__init__.py + - google/monitoring/dashboard_v1/types/common.py + - google/monitoring/dashboard_v1/types/dashboard.py + - google/monitoring/dashboard_v1/types/dashboards_service.py + - google/monitoring/dashboard_v1/types/layouts.py + - google/monitoring/dashboard_v1/types/metrics.py + - google/monitoring/dashboard_v1/types/scorecard.py + - google/monitoring/dashboard_v1/types/text.py + - google/monitoring/dashboard_v1/types/widget.py + - google/monitoring/dashboard_v1/types/xychart.py + - tests/unit/gapic/dashboard_v1 + - tests/unit/gapic/dashboard_v1/__init__.py + - tests/unit/gapic/dashboard_v1/test_dashboards_service.py + python: + opt_args_by_api: + 
google/monitoring/dashboard/v1: + - python-gapic-name=monitoring_dashboard + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-monitoring-dashboards + name_pretty_override: Monitoring Dashboards + metadata_name_override: monitoring-dashboards + default_version: v1 + - name: google-cloud-monitoring-metrics-scopes + version: 1.12.0 + apis: + - path: google/monitoring/metricsscope/v1 + description_override: Manages your Cloud Monitoring data and configurations. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/monitoring/metricsscope/v1: + - python-gapic-name=monitoring_metrics_scope + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-monitoring-metrics-scopes + name_pretty_override: Metrics Scopes + default_version: v1 + - name: google-cloud-ndb + version: 2.4.2 + python: + library_type: GAPIC_MANUAL + name_pretty_override: NDB Client Library for Google Cloud Datastore + api_shortname_override: datastore + client_documentation_override: https://googleapis.dev/python/python-ndb/latest + issue_tracker_override: https://github.com/googleapis/python-ndb/issues + metadata_name_override: python-ndb + skip_readme_copy: true + - name: google-cloud-netapp + version: 0.9.0 + apis: + - path: google/cloud/netapp/v1 + description_override: NetApp API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: NetApp API + product_documentation_override: https://cloud.google.com/netapp/volumes/docs/discover/overview + metadata_name_override: netapp + default_version: v1 + - name: google-cloud-network-connectivity + version: 2.15.0 + apis: + - path: google/cloud/networkconnectivity/v1 + - path: google/cloud/networkconnectivity/v1beta + - path: google/cloud/networkconnectivity/v1alpha1 + description_override: The Network Connectivity API will be home to various services which provide information pertaining to network connectivity. 
This includes information like interconnects, VPNs, VPCs, routing information, ip address details, etc. This information will help customers verify their network configurations and helps them to discover misconfigurations, inconsistencies, etc. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/networkconnectivity/v1: + - warehouse-package-name=google-cloud-network-connectivity + google/cloud/networkconnectivity/v1alpha1: + - warehouse-package-name=google-cloud-network-connectivity + google/cloud/networkconnectivity/v1beta: + - warehouse-package-name=google-cloud-network-connectivity + name_pretty_override: Network Connectivity Center + metadata_name_override: networkconnectivity + default_version: v1 + - name: google-cloud-network-management + version: 1.34.0 + apis: + - path: google/cloud/networkmanagement/v1 + description_override: provides a collection of network performance monitoring and diagnostic capabilities. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/networkmanagement/v1: + - python-gapic-name=network_management + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-network-management + metadata_name_override: networkmanagement + default_version: v1 + - name: google-cloud-network-security + version: 0.13.0 + apis: + - path: google/cloud/networksecurity/v1 + - path: google/cloud/networksecurity/v1beta1 + - path: google/cloud/networksecurity/v1alpha1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/networksecurity/v1: + - python-gapic-name=network_security + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-network-security + google/cloud/networksecurity/v1alpha1: + - python-gapic-name=network_security + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-network-security + google/cloud/networksecurity/v1beta1: + - 
warehouse-package-name=google-cloud-network-security + - python-gapic-namespace=google.cloud + - python-gapic-name=network_security + product_documentation_override: https://cloud.google.com/traffic-director/docs/reference/network-security/rest + metadata_name_override: networksecurity + default_version: v1 + - name: google-cloud-network-services + version: 0.9.0 + apis: + - path: google/cloud/networkservices/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/networkservices/v1: + - python-gapic-name=network_services + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-network-services + product_documentation_override: https://cloud.google.com + metadata_name_override: networkservices + default_version: v1 + - name: google-cloud-notebooks + version: 1.16.0 + apis: + - path: google/cloud/notebooks/v2 + - path: google/cloud/notebooks/v1 + - path: google/cloud/notebooks/v1beta1 + description_override: is a managed service that offers an integrated and secure JupyterLab environment for data scientists and machine learning developers to experiment, develop, and deploy models into production. Users can create instances running JupyterLab that come pre-installed with the latest data science and machine learning frameworks in a single click. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/notebooks/v1: + - transport=grpc + google/cloud/notebooks/v1beta1: + - transport=grpc+rest + google/cloud/notebooks/v2: + - transport=grpc+rest + name_pretty_override: AI Platform Notebooks + product_documentation_override: https://cloud.google.com/ai-platform/notebooks/ + metadata_name_override: notebooks + default_version: v1 + - name: google-cloud-optimization + version: 1.14.0 + apis: + - path: google/cloud/optimization/v1 + description_override: is a managed routing service that takes your list of orders, vehicles, constraints, and objectives and returns the most efficient plan for your entire fleet in near real-time. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: optimization + default_version: v1 + - name: google-cloud-oracledatabase + version: 0.5.0 + apis: + - path: google/cloud/oracledatabase/v1 + description_override: The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Oracle Database@Google Cloud API + default_version: v1 + - name: google-cloud-orchestration-airflow + version: 1.20.0 + apis: + - path: google/cloud/orchestration/airflow/service/v1 + - path: google/cloud/orchestration/airflow/service/v1beta1 + description_override: is a managed Apache Airflow service that helps you create, schedule, monitor and manage workflows. Cloud Composer automation helps you create Airflow environments quickly and use Airflow-native tools, such as the powerful Airflow web interface and command line tools, so you can focus on your workflows and not your infrastructure. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/orchestration/airflow/service/v1: + - warehouse-package-name=google-cloud-orchestration-airflow + metadata_name_override: composer + default_version: v1 + - name: google-cloud-org-policy + version: 1.17.0 + apis: + - path: google/cloud/orgpolicy/v2 + - path: google/cloud/orgpolicy/v1 + description_override: The Organization Policy API allows users to configure governance rules on their GCP resources across the Cloud Resource Hierarchy. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - google/cloud/orgpolicy/v1/__init__.py + - tests/unit/test_packaging.py + python: + opt_args_by_api: + google/cloud/orgpolicy/v2: + - warehouse-package-name=google-cloud-org-policy + proto_only_apis: + - google/cloud/orgpolicy/v1 + product_documentation_override: https://cloud.google.com/resource-manager/docs/organization-policy/overview + metadata_name_override: orgpolicy + default_version: v2 + - name: google-cloud-os-config + version: 1.24.0 + apis: + - path: google/cloud/osconfig/v1 + - path: google/cloud/osconfig/v1alpha + description_override: provides OS management tools that can be used for patch management, patch compliance, and configuration management on VM instances. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/osconfig/v1: + - warehouse-package-name=google-cloud-os-config + google/cloud/osconfig/v1alpha: + - warehouse-package-name=google-cloud-os-config + product_documentation_override: https://cloud.google.com/compute/docs/manage-os + metadata_name_override: osconfig + default_version: v1 + - name: google-cloud-os-login + version: 2.20.0 + apis: + - path: google/cloud/oslogin/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/oslogin_v1/common/types.rst + - google/cloud/oslogin_v1/common + - google/cloud/oslogin_v1/common/__init__.py + - google/cloud/oslogin_v1/common/gapic_metadata.json + - google/cloud/oslogin_v1/common/py.typed + - google/cloud/oslogin_v1/common/types + - google/cloud/oslogin_v1/common/types/__init__.py + - google/cloud/oslogin_v1/common/types/common.py + python: + opt_args_by_api: + google/cloud/oslogin/v1: + - warehouse-package-name=google-cloud-os-login + - proto-plus-deps=google.cloud.oslogin.common + name_pretty_override: Google Cloud OS Login + product_documentation_override: https://cloud.google.com/compute/docs/oslogin/ + metadata_name_override: oslogin + default_version: v1 + - name: google-cloud-parallelstore + version: 0.6.0 + apis: + - path: google/cloud/parallelstore/v1 + - path: google/cloud/parallelstore/v1beta + description_override: Parallelstore is based on Intel DAOS and delivers up to 6.3x greater read throughput performance compared to competitive Lustre scratch offerings. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Parallelstore API + product_documentation_override: https://cloud.google.com/parallelstore + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1beta + - name: google-cloud-parametermanager + version: 0.4.0 + apis: + - path: google/cloud/parametermanager/v1 + description_override: '(Public Preview) Parameter Manager is a single source of truth to store, access and manage the lifecycle of your workload parameters. Parameter Manager aims to make management of sensitive application parameters effortless for customers without diminishing focus on security. ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Parameter Manager API + product_documentation_override: https://cloud.google.com/secret-manager/parameter-manager/docs/overview + issue_tracker_override: https://issuetracker.google.com/issues/new?component=1442085&template=2002674 + default_version: v1 + - name: google-cloud-phishing-protection + version: 1.17.0 + apis: + - path: google/cloud/phishingprotection/v1beta1 + description_override: helps prevent users from accessing phishing sites by identifying various signals associated with malicious content, including the use of your brand assets, classifying malicious content that uses your brand and reporting the unsafe URLs to Google Safe Browsing. Once a site is propagated to Safe Browsing, users will see warnings across more than 4 billion devices. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/phishingprotection/v1beta1: + - warehouse-package-name=google-cloud-phishing-protection + product_documentation_override: https://cloud.google.com/phishing-protection/docs/ + metadata_name_override: phishingprotection + default_version: v1beta1 + - name: google-cloud-policy-troubleshooter + version: 1.16.0 + apis: + - path: google/cloud/policytroubleshooter/v1 + description_override: makes it easier to understand why a user has access to a resource or doesn't have permission to call an API. Given an email, resource, and permission, Policy Troubleshooter examines all Identity and Access Management (IAM) policies that apply to the resource. It then reveals whether the member's roles include the permission on that resource and, if so, which policies bind the member to those roles. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/policytroubleshooter/v1: + - warehouse-package-name=google-cloud-policy-troubleshooter + name_pretty_override: IAM Policy Troubleshooter API + product_documentation_override: https://cloud.google.com/iam/docs/troubleshooting-access#rest-api/ + metadata_name_override: policytroubleshooter + default_version: v1 + - name: google-cloud-policysimulator + version: 0.4.0 + apis: + - path: google/cloud/policysimulator/v1 + description_override: Policy Simulator is a collection of endpoints for creating, running, and viewing a `Replay`. A `Replay` is a type of simulation that lets you see how your members' access to resources might change if you changed your IAM policy. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/policysimulator/v1: + - proto-plus-deps=google.cloud.orgpolicy.v2 + name_pretty_override: Policy Simulator API + product_documentation_override: https://cloud.google.com/policy-intelligence/docs/iam-simulator-overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: policysimulator + default_version: v1 + - name: google-cloud-policytroubleshooter-iam + version: 0.4.0 + apis: + - path: google/cloud/policytroubleshooter/iam/v3 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/policytroubleshooter/iam/v3: + - python-gapic-namespace=google.cloud + - python-gapic-name=policytroubleshooter_iam + name_pretty_override: Policy Troubleshooter API + product_documentation_override: https://cloud.google.com/policy-intelligence/docs/troubleshoot-access + api_shortname_override: iam + metadata_name_override: policytroubleshooter-iam + default_version: v3 + - name: google-cloud-private-ca + version: 1.18.0 + apis: + - path: google/cloud/security/privateca/v1 + - path: google/cloud/security/privateca/v1beta1 + description_override: simplifies the deployment and management of private CAs without managing infrastructure. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/security/privateca/v1: + - warehouse-package-name=google-cloud-private-ca + google/cloud/security/privateca/v1beta1: + - warehouse-package-name=google-cloud-private-ca + name_pretty_override: Private Certificate Authority + metadata_name_override: privateca + default_version: v1 + - name: google-cloud-private-catalog + version: 0.12.0 + apis: + - path: google/cloud/privatecatalog/v1beta1 + description_override: allows developers and cloud admins to make their solutions discoverable to their internal enterprise users. 
Cloud admins can manage their solutions and ensure their users are always launching the latest versions. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/privatecatalog/v1beta1: + - warehouse-package-name=google-cloud-private-catalog + - autogen-snippets + name_pretty_override: Private Catalog + metadata_name_override: cloudprivatecatalog + default_version: v1beta1 + - name: google-cloud-privilegedaccessmanager + version: 0.4.0 + apis: + - path: google/cloud/privilegedaccessmanager/v1 + description_override: Privileged Access Manager (PAM) helps you on your journey towards least privilege and helps mitigate risks tied to privileged access misuse or abuse. PAM allows you to shift from always-on standing privileges towards on-demand access with just-in-time, time-bound, and approval-based access elevations. PAM allows IAM administrators to create entitlements that can grant just-in-time, temporary access to any resource scope. Requesters can explore eligible entitlements and request the access needed for their task. Approvers are notified when approvals await their decision. Streamlined workflows facilitated by using PAM can support various use cases, including emergency access for incident responders, time-boxed access for developers for critical deployment or maintenance, temporary access for operators for data ingestion and audits, JIT access to service accounts for automated tasks, and more. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Privileged Access Manager API + product_documentation_override: https://cloud.google.com/iam/docs/pam-overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-pubsub + version: 2.36.0 + apis: + - path: google/pubsub/v1 + description_override: is designed to provide reliable, many-to-many, asynchronous messaging between applications. 
Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/pubsub/v1: + - warehouse-package-name=google-cloud-pubsub + name_pretty_override: Google Cloud Pub/Sub + product_documentation_override: https://cloud.google.com/pubsub/docs/ + issue_tracker_override: https://issuetracker.google.com/savedsearches/559741 + metadata_name_override: pubsub + default_version: v1 + - name: google-cloud-quotas + version: 0.6.0 + apis: + - path: google/api/cloudquotas/v1 + - path: google/api/cloudquotas/v1beta + description_override: Cloud Quotas API provides Google Cloud service consumers with management and observability for resource usage, quotas, and restrictions of the services they consume. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/api/cloudquotas/v1: + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-quotas + google/api/cloudquotas/v1beta: + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-quotas + name_pretty_override: Cloud Quotas API + product_documentation_override: https://cloud.google.com/docs/quota/api-overview + metadata_name_override: google-cloud-cloudquotas + default_version: v1 + - name: google-cloud-rapidmigrationassessment + version: 0.4.0 + apis: + - path: google/cloud/rapidmigrationassessment/v1 + description_override: The Rapid Migration Assessment service is our first-party migration assessment and planning tool. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Rapid Migration Assessment API + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: rapidmigrationassessment + default_version: v1 + - name: google-cloud-recaptcha-enterprise + version: 1.31.0 + apis: + - path: google/cloud/recaptchaenterprise/v1 + description_override: protect your website from fraudulent activity like scraping, credential stuffing, and automated account creation. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/recaptchaenterprise/v1: + - warehouse-package-name=google-cloud-recaptcha-enterprise + metadata_name_override: recaptchaenterprise + default_version: v1 + - name: google-cloud-recommendations-ai + version: 0.13.0 + apis: + - path: google/cloud/recommendationengine/v1beta1 + description_override: delivers highly personalized product recommendations at scale. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/recommendationengine/v1beta1: + - warehouse-package-name=google-cloud-recommendations-ai + metadata_name_override: recommendationengine + default_version: v1beta1 + - name: google-cloud-recommender + version: 2.21.0 + apis: + - path: google/cloud/recommender/v1 + - path: google/cloud/recommender/v1beta1 + description_override: delivers highly personalized product recommendations at scale. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Recommender + metadata_name_override: recommender + default_version: v1 + - name: google-cloud-redis + version: 2.21.0 + apis: + - path: google/cloud/redis/v1 + - path: google/cloud/redis/v1beta1 + description_override: is a fully managed Redis service for the Google Cloud. 
Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Redis service without the burden of managing complex Redis deployments. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Redis + product_documentation_override: https://cloud.google.com/memorystore/docs/redis/ + issue_tracker_override: https://issuetracker.google.com/savedsearches/5169231 + metadata_name_override: redis + default_version: v1 + - name: google-cloud-redis-cluster + version: 0.4.0 + apis: + - path: google/cloud/redis/cluster/v1 + - path: google/cloud/redis/cluster/v1beta1 + description_override: Creates and manages Redis instances on the Google Cloud Platform. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/redis/cluster/v1: + - python-gapic-name=redis_cluster + - python-gapic-namespace=google.cloud + google/cloud/redis/cluster/v1beta1: + - python-gapic-name=redis_cluster + - python-gapic-namespace=google.cloud + name_pretty_override: Google Cloud Memorystore for Redis API + product_documentation_override: https://cloud.google.com/redis/docs + api_shortname_override: cluster + api_id_override: cluster.googleapis.com + default_version: v1 + - name: google-cloud-resource-manager + version: 1.17.0 + apis: + - path: google/cloud/resourcemanager/v3 + description_override: provides methods that you can use to programmatically manage your projects in the Google Cloud Platform. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/resourcemanager/v3: + - warehouse-package-name=google-cloud-resource-manager + name_pretty_override: Resource Manager + metadata_name_override: cloudresourcemanager + default_version: v3 + - name: google-cloud-retail + version: 2.10.0 + apis: + - path: google/cloud/retail/v2 + - path: google/cloud/retail/v2beta + - path: google/cloud/retail/v2alpha + description_override: Cloud Retail service enables customers to build end-to-end personalized recommendation systems without requiring a high level of expertise in machine learning, recommendation system, or Google Cloud. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Retail + product_documentation_override: https://cloud.google.com/retail/docs/ + metadata_name_override: retail + default_version: v2 + - name: google-cloud-run + version: 0.16.0 + apis: + - path: google/cloud/run/v2 + description_override: is a managed compute platform that enables you to run containers that are invocable via requests or events. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Run + metadata_name_override: run + default_version: v2 + - name: google-cloud-runtimeconfig + version: 0.36.1 + python: + library_type: GAPIC_MANUAL + name_pretty_override: Google Cloud Runtime Configurator + product_documentation_override: https://cloud.google.com/deployment-manager/runtime-configurator/ + api_shortname_override: runtimeconfig + api_id_override: runtimeconfig.googleapis.com + issue_tracker_override: https://issuetracker.google.com/savedsearches/559663 + metadata_name_override: runtimeconfig + - name: google-cloud-saasplatform-saasservicemgmt + version: 0.5.0 + apis: + - path: google/cloud/saasplatform/saasservicemgmt/v1beta1 + description_override: SaaS Runtime lets you store, host, manage, and monitor software as a service (SaaS) applications on Google Cloud. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/saasplatform/saasservicemgmt/v1beta1: + - python-gapic-namespace=google.cloud + - python-gapic-name=saasplatform_saasservicemgmt + name_pretty_override: SaaS Runtime API + product_documentation_override: https://cloud.google.com/saas-runtime/docs/overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1beta1 + - name: google-cloud-scheduler + version: 2.19.0 + apis: + - path: google/cloud/scheduler/v1 + - path: google/cloud/scheduler/v1beta1 + description_override: lets you set up scheduled units of work to be executed at defined times or regular intervals. These work units are commonly known as cron jobs. Typical use cases might include sending out a report email on a daily basis, updating some cached data every 10 minutes, or updating some summary information once an hour. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + metadata_name_override: cloudscheduler + default_version: v1 + - name: google-cloud-secret-manager + version: 2.27.0 + apis: + - path: google/cloud/secretmanager/v1 + - path: google/cloud/secretmanager/v1beta2 + - path: google/cloud/secrets/v1beta1 + description_override: Stores, manages, and secures access to application secrets. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/secretmanager/v1: + - warehouse-package-name=google-cloud-secret-manager + google/cloud/secrets/v1beta1: + - python-gapic-namespace=google.cloud + - python-gapic-name=secretmanager + metadata_name_override: secretmanager + default_version: v1 + - name: google-cloud-securesourcemanager + version: 0.5.0 + apis: + - path: google/cloud/securesourcemanager/v1 + description_override: Regionally deployed, single-tenant managed source code repository hosted on Google Cloud. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Secure Source Manager API + product_documentation_override: https://cloud.google.com/secure-source-manager/docs/overview + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: securesourcemanager + default_version: v1 + - name: google-cloud-security-publicca + version: 0.6.0 + apis: + - path: google/cloud/security/publicca/v1 + - path: google/cloud/security/publicca/v1beta1 + description_override: simplifies the deployment and management of public CAs without managing infrastructure. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/security/publicca/v1beta1: + - warehouse-package-name=google-cloud-public-ca + product_documentation_override: https://cloud.google.com/certificate-manager/docs/public-ca + metadata_name_override: publicca + default_version: v1 + - name: google-cloud-securitycenter + version: 1.44.0 + apis: + - path: google/cloud/securitycenter/v2 + - path: google/cloud/securitycenter/v1 + - path: google/cloud/securitycenter/v1p1beta1 + - path: google/cloud/securitycenter/v1beta1 + description_override: makes it easier for you to prevent, detect, and respond to threats. Identify security misconfigurations in virtual machines, networks, applications, and storage buckets from a centralized dashboard. Take action on them before they can potentially result in business damage or loss. Built-in capabilities can quickly surface suspicious activity in your Stackdriver security logs or indicate compromised virtual machines. Respond to threats by following actionable recommendations or exporting logs to your SIEM for further investigation. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud Security Command Center + product_documentation_override: https://cloud.google.com/security-command-center + issue_tracker_override: https://issuetracker.google.com/savedsearches/559748 + metadata_name_override: securitycenter + default_version: v1 + - name: google-cloud-securitycentermanagement + version: 0.4.0 + apis: + - path: google/cloud/securitycentermanagement/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Security Center Management API + product_documentation_override: https://cloud.google.com/securitycentermanagement/docs/overview + api_shortname_override: securitycenter + api_id_override: securitycenter.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-cloud-service-control + version: 1.19.0 + apis: + - path: google/api/servicecontrol/v2 + - path: google/api/servicecontrol/v1 + description_override: ' is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/api/servicecontrol/v1: + - python-gapic-name=servicecontrol + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-service-control + google/api/servicecontrol/v2: + - python-gapic-name=servicecontrol + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-service-control + product_documentation_override: https://cloud.google.com/service-infrastructure/docs/overview/ + metadata_name_override: servicecontrol + default_version: v1 + - name: google-cloud-service-directory + version: 1.17.0 + apis: + - path: google/cloud/servicedirectory/v1 + - path: google/cloud/servicedirectory/v1beta1 + description_override: Allows the registration and lookup of services. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/servicedirectory/v1: + - warehouse-package-name=google-cloud-service-directory + google/cloud/servicedirectory/v1beta1: + - warehouse-package-name=google-cloud-service-directory + metadata_name_override: servicedirectory + default_version: v1 + - name: google-cloud-service-management + version: 1.16.0 + apis: + - path: google/api/servicemanagement/v1 + description_override: is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway. Service Infrastructure provides a wide range of features to service consumers and service producers, including authentication, authorization, auditing, rate limiting, analytics, billing, logging, and monitoring. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/api/servicemanagement/v1: + - python-gapic-name=servicemanagement + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-service-management + product_documentation_override: https://cloud.google.com/service-infrastructure/docs/overview/ + metadata_name_override: servicemanagement + default_version: v1 + - name: google-cloud-service-usage + version: 1.16.0 + apis: + - path: google/api/serviceusage/v1 + description_override: is an infrastructure service of Google Cloud that lets you list and manage other APIs and services in your Cloud projects. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/api/serviceusage/v1: + - python-gapic-name=service_usage + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-service-usage + metadata_name_override: serviceusage + default_version: v1 + - name: google-cloud-servicehealth + version: 0.4.0 + apis: + - path: google/cloud/servicehealth/v1 + description_override: Personalized Service Health helps you gain visibility into disruptive events impacting Google Cloud products. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Service Health API + product_documentation_override: https://cloud.google.com/service-health/docs/overview + default_version: v1 + - name: google-cloud-shell + version: 1.15.0 + apis: + - path: google/cloud/shell/v1 + description_override: is an interactive shell environment for Google Cloud that makes it easy for you to learn and experiment with Google Cloud and manage your projects and resources from your web browser. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: cloudshell + default_version: v1 + - name: google-cloud-source-context + version: 1.10.0 + apis: + - path: google/devtools/source/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/source_context_v1/test_source_context_v1.py + python: + library_type: OTHER + opt_args_by_api: + google/devtools/source/v1: + - warehouse-package-name=google-cloud-source-context + - python-gapic-namespace=google.cloud + - python-gapic-name=source_context + name_pretty_override: Source Context + product_documentation_override: https://cloud.google.com + api_shortname_override: source + api_id_override: source.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: source + default_version: v1 + - name: google-cloud-spanner + version: 3.64.0 + apis: + - path: google/spanner/v1 + - path: google/spanner/admin/instance/v1 + - path: google/spanner/admin/database/v1 + description_override: "is the world's first fully managed relational database service \nto offer both strong consistency and horizontal scalability for \nmission-critical online transaction processing (OLTP) applications. With Cloud \nSpanner you enjoy all the traditional benefits of a relational database; but \nunlike any other relational database service, Cloud Spanner scales horizontally \nto hundreds or thousands of servers to handle the biggest transactional \nworkloads." 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/spanner/admin/database/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=spanner_admin_database + google/spanner/admin/instance/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=spanner_admin_instance + google/spanner/v1: + - python-gapic-namespace=google.cloud + product_documentation_override: https://cloud.google.com/spanner/docs/ + issue_tracker_override: https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open + metadata_name_override: spanner + default_version: v1 + - name: google-cloud-speech + version: 2.38.0 + apis: + - path: google/cloud/speech/v2 + - path: google/cloud/speech/v1 + - path: google/cloud/speech/v1p1beta1 + description_override: enables easy integration of Google speech recognition technologies into developer applications. Send audio and receive a text transcription from the Speech-to-Text API service. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - google/cloud/speech_v1/helpers.py + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + - tests/unit/test_helpers.py + python: + library_type: GAPIC_COMBO + name_pretty_override: Cloud Speech + product_documentation_override: https://cloud.google.com/speech-to-text/docs/ + metadata_name_override: speech + default_version: v1 + - name: google-cloud-storage + version: 3.10.1 + apis: + - path: google/storage/v2 + description_override: 'is a durable and highly available object storage service. Google Cloud Storage is almost infinitely scalable and guarantees consistency: when a write succeeds, the latest copy of the object will be returned to any GET, globally.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + library_type: GAPIC_MANUAL + opt_args_by_api: + google/storage/v2: + - python-gapic-namespace=google.cloud + - python-gapic-name=_storage + - warehouse-package-name=google-cloud-storage + name_pretty_override: Google Cloud Storage + product_documentation_override: https://cloud.google.com/storage + issue_tracker_override: https://issuetracker.google.com/savedsearches/559782 + metadata_name_override: storage + default_version: v2 + - name: google-cloud-storage-control + version: 1.11.0 + apis: + - path: google/storage/control/v2 + description_override: Lets you perform metadata-specific, control plane, and long-running operations apart from the Storage API. Separating these operations from the Storage API improves API standardization and lets you run faster releases. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/storage/control/v2: + - python-gapic-name=storage_control + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-storage-control + name_pretty_override: Storage Control API + product_documentation_override: https://cloud.google.com/storage/docs/reference/rpc/google.storage.control.v2 + default_version: v2 + - name: google-cloud-storage-transfer + version: 1.20.0 + apis: + - path: google/storagetransfer/v1 + description_override: Secure, low-cost services for transferring data from cloud or on-premises sources. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/storagetransfer/v1: + - python-gapic-name=storage_transfer + - python-gapic-namespace=google.cloud + - warehouse-package-name=google-cloud-storage-transfer + name_pretty_override: Storage Transfer Service + metadata_name_override: storagetransfer + default_version: v1 + - name: google-cloud-storagebatchoperations + version: 0.6.0 + apis: + - path: google/cloud/storagebatchoperations/v1 + description_override: 'null ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Storage Batch Operations API + product_documentation_override: https://cloud.google.com/storage/docs/batch-operations/overview + default_version: v1 + - name: google-cloud-storageinsights + version: 0.4.0 + apis: + - path: google/cloud/storageinsights/v1 + description_override: The Storage Insights inventory report feature helps you manage your object storage at scale. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Storage Insights API + product_documentation_override: https://cloud.google.com/storage/docs/insights/storage-insights + metadata_name_override: storageinsights + default_version: v1 + - name: google-cloud-support + version: 0.4.0 + apis: + - path: google/cloud/support/v2 + - path: google/cloud/support/v2beta + description_override: Manages Google Cloud technical support cases for Customer Care support offerings. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud Support API + product_documentation_override: https://cloud.google.com/support/docs/reference/support-api + api_shortname_override: support + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: support + default_version: v2 + - name: google-cloud-talent + version: 2.20.0 + apis: + - path: google/cloud/talent/v4 + - path: google/cloud/talent/v4beta1 + description_override: Cloud Talent Solution provides the capability to create, read, update, and delete job postings, as well as search jobs based on keywords and filters. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Talent Solution + metadata_name_override: talent + default_version: v4 + - name: google-cloud-tasks + version: 2.22.0 + apis: + - path: google/cloud/tasks/v2 + - path: google/cloud/tasks/v2beta3 + - path: google/cloud/tasks/v2beta2 + description_override: a fully managed service that allows you to manage the execution, dispatch and delivery of a large number of distributed tasks. You can asynchronously perform work outside of a user request. Your tasks can be executed on App Engine or any arbitrary HTTP endpoint. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + product_documentation_override: https://cloud.google.com/tasks/docs/ + metadata_name_override: cloudtasks + default_version: v2 + - name: google-cloud-telcoautomation + version: 0.5.0 + apis: + - path: google/cloud/telcoautomation/v1 + - path: google/cloud/telcoautomation/v1alpha1 + description_override: APIs to automate 5G deployment and management of cloud infrastructure and network functions. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Telco Automation API + default_version: v1 + - name: google-cloud-testutils + version: 1.7.1 + python: + library_type: OTHER + name_pretty_override: Python Test Utils for Google Cloud + client_documentation_override: https://github.com/googleapis/google-cloud-python/packages/google-cloud-testutils + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: google-cloud-test-utils + - name: google-cloud-texttospeech + version: 2.36.0 + apis: + - path: google/cloud/texttospeech/v1 + - path: google/cloud/texttospeech/v1beta1 + description_override: enables easy integration of Google text recognition technologies into developer applications. Send text and receive synthesized audio output from the Cloud Text-to-Speech API service. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + name_pretty_override: Google Cloud Text-to-Speech + metadata_name_override: texttospeech + default_version: v1 + - name: google-cloud-tpu + version: 1.26.0 + apis: + - path: google/cloud/tpu/v2 + - path: google/cloud/tpu/v1 + - path: google/cloud/tpu/v2alpha1 + description_override: Cloud Tensor Processing Units (TPUs) are Google's custom-developed application-specific integrated circuits (ASICs) used to accelerate machine learning workloads. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/tpu/v1: + - transport=grpc + google/cloud/tpu/v2: + - transport=grpc+rest + google/cloud/tpu/v2alpha1: + - transport=grpc + metadata_name_override: tpu + default_version: v1 + - name: google-cloud-trace + version: 1.19.0 + apis: + - path: google/devtools/cloudtrace/v2 + - path: google/devtools/cloudtrace/v1 + description_override: is a distributed tracing system that collects latency data from your applications and displays it in the Google Cloud Platform Console. You can track how requests propagate through your application and receive detailed near real-time performance insights. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/devtools/cloudtrace/v1: + - python-gapic-namespace=google.cloud + - python-gapic-name=trace + google/devtools/cloudtrace/v2: + - python-gapic-name=trace + - python-gapic-namespace=google.cloud + metadata_name_override: cloudtrace + default_version: v2 + - name: google-cloud-translate + version: 3.25.0 + apis: + - path: google/cloud/translate/v3 + - path: google/cloud/translate/v3beta1 + description_override: can dynamically translate text between thousands of language pairs. Translation lets websites and programs programmatically integrate with the translation service. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - docs/client.rst + - docs/v2.rst + - google/cloud/translate_v2 + - google/cloud/translate_v2/__init__.py + - google/cloud/translate_v2/_http.py + - google/cloud/translate_v2/client.py + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + - tests/unit/v2 + - tests/unit/v2/test__http.py + - tests/unit/v2/test_client.py + python: + library_type: GAPIC_COMBO + opt_args_by_api: + google/cloud/translate/v3: + - python-gapic-name=translate + google/cloud/translate/v3beta1: + - python-gapic-name=translate + product_documentation_override: https://cloud.google.com/translate/docs/ + metadata_name_override: translate + default_version: v3 + - name: google-cloud-vectorsearch + version: 0.9.0 + apis: + - path: google/cloud/vectorsearch/v1 + - path: google/cloud/vectorsearch/v1beta + description_override: |- + The Vector Search API provides a fully-managed, highly performant, and + scalable vector database designed to power next-generation search, + recommendation, and generative AI applications. It allows you to store, + index, and query your data and its corresponding vector embeddings through + a simple, intuitive interface. With Vector Search, you can define custom + schemas for your data, insert objects with associated metadata, + automatically generate embeddings from your data, and perform fast + approximate nearest neighbor (ANN) searches to find semantically similar + items at scale. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Vector Search API + product_documentation_override: https://docs.cloud.google.com/vertex-ai/docs/vector-search-2/overview + default_version: v1 + - name: google-cloud-video-live-stream + version: 1.16.0 + apis: + - path: google/cloud/video/livestream/v1 + description_override: transcodes mezzanine live signals into direct-to-consumer streaming formats, including Dynamic Adaptive Streaming over HTTP (DASH/MPEG-DASH), and HTTP Live Streaming (HLS), for multiple device platforms. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/video/livestream/v1: + - python-gapic-name=live_stream + - python-gapic-namespace=google.cloud.video + - warehouse-package-name=google-cloud-video-live-stream + metadata_name_override: livestream + default_version: v1 + - name: google-cloud-video-stitcher + version: 0.11.0 + apis: + - path: google/cloud/video/stitcher/v1 + description_override: The Video Stitcher API helps you generate dynamic content for delivery to client devices. You can call the Video Stitcher API from your servers to dynamically insert ads into video-on-demand and livestreams for your users. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: videostitcher + default_version: v1 + - name: google-cloud-video-transcoder + version: 1.20.0 + apis: + - path: google/cloud/video/transcoder/v1 + description_override: allows you to transcode videos into a variety of formats. The Transcoder API benefits broadcasters, production companies, businesses, and individuals looking to transform their video content for use across a variety of user devices. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + metadata_name_override: transcoder + default_version: v1 + - name: google-cloud-videointelligence + version: 2.19.0 + apis: + - path: google/cloud/videointelligence/v1 + - path: google/cloud/videointelligence/v1p3beta1 + - path: google/cloud/videointelligence/v1p2beta1 + - path: google/cloud/videointelligence/v1p1beta1 + - path: google/cloud/videointelligence/v1beta2 + description_override: makes videos searchable, and discoverable, by extracting metadata with an easy to use API. You can now search every moment of every video file in your catalog and find every occurrence as well as its significance. It quickly annotates videos stored in Google Cloud Storage, and helps you identify key nouns entities of your video, and when they occur within the video. Separate signal from noise, by retrieving relevant information at the video, shot or per frame. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + python: + opt_args_by_api: + google/cloud/videointelligence/v1: + - transport=grpc+rest + google/cloud/videointelligence/v1beta2: + - transport=grpc+rest + google/cloud/videointelligence/v1p1beta1: + - transport=grpc+rest + google/cloud/videointelligence/v1p2beta1: + - transport=grpc+rest + google/cloud/videointelligence/v1p3beta1: + - transport=grpc + name_pretty_override: Video Intelligence + product_documentation_override: https://cloud.google.com/video-intelligence/docs/ + metadata_name_override: videointelligence + default_version: v1 + - name: google-cloud-vision + version: 3.13.0 + apis: + - path: google/cloud/vision/v1 + - path: google/cloud/vision/v1p4beta1 + - path: google/cloud/vision/v1p3beta1 + - path: google/cloud/vision/v1p2beta1 + - path: google/cloud/vision/v1p1beta1 + description_override: allows developers to easily integrate vision detection features within applications, including image labeling, face and landmark 
detection, optical character recognition (OCR), and tagging of explicit content. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - google/cloud/vision_helpers + - google/cloud/vision_helpers/__init__.py + - google/cloud/vision_helpers/decorators.py + - tests/system + - tests/system/__init__.py + - tests/system/smoke_test.py + - tests/unit/test_decorators.py + - tests/unit/test_helpers.py + python: + library_type: GAPIC_COMBO + product_documentation_override: https://cloud.google.com/vision/docs/ + metadata_name_override: vision + default_version: v1 + - name: google-cloud-visionai + version: 0.5.0 + apis: + - path: google/cloud/visionai/v1 + - path: google/cloud/visionai/v1alpha1 + description_override: Easily build and deploy Vertex AI Vision applications using a single platform. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Vision AI API + issue_tracker_override: https://issuetracker.google.com/issues/new?component=187174&pli=1&template=1161261 + default_version: v1 + - name: google-cloud-vm-migration + version: 1.16.0 + apis: + - path: google/cloud/vmmigration/v1 + description_override: ' for Compute Engine migrates VMs from your on-premises data center into Compute Engine.' 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/vmmigration/v1: + - warehouse-package-name=google-cloud-vm-migration + name_pretty_override: Cloud VM Migration + metadata_name_override: vmmigration + default_version: v1 + - name: google-cloud-vmwareengine + version: 1.11.0 + apis: + - path: google/cloud/vmwareengine/v1 + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Google Cloud VMware Engine + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: vmwareengine + default_version: v1 + - name: google-cloud-vpc-access + version: 1.16.0 + apis: + - path: google/cloud/vpcaccess/v1 + description_override: provides networking functionality to Compute Engine virtual machine (VM) instances, Google Kubernetes Engine (GKE) containers, and the App Engine flexible environment. VPC provides networking for your cloud-based services that is global, scalable, and flexible. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/vpcaccess/v1: + - warehouse-package-name=google-cloud-vpc-access + name_pretty_override: Virtual Private Cloud + metadata_name_override: vpcaccess + default_version: v1 + - name: google-cloud-webrisk + version: 1.21.0 + apis: + - path: google/cloud/webrisk/v1 + - path: google/cloud/webrisk/v1beta1 + description_override: is a Google Cloud service that lets client applications check URLs against Google's constantly updated lists of unsafe web resources. Unsafe web resources include social engineering sites—such as phishing and deceptive sites—and sites that host malware or unwanted software. With the Web Risk API, you can quickly identify known bad sites, warn users before they click infected links, and prevent users from posting links to known infected pages from your site. 
The Web Risk API includes data on more than a million unsafe URLs and stays up to date by examining billions of URLs each day. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/web-risk/docs/ + metadata_name_override: webrisk + default_version: v1 + - name: google-cloud-websecurityscanner + version: 1.20.0 + apis: + - path: google/cloud/websecurityscanner/v1 + - path: google/cloud/websecurityscanner/v1beta + - path: google/cloud/websecurityscanner/v1alpha + description_override: identifies security vulnerabilities in your App Engine, Compute Engine, and Google Kubernetes Engine web applications. It crawls your application, following all links within the scope of your starting URLs, and attempts to exercise as many user inputs and event handlers as possible. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Cloud Security Scanner + product_documentation_override: https://cloud.google.com/security-scanner/docs/ + api_shortname_override: securitycenter + api_id_override: securitycenter.googleapis.com + metadata_name_override: websecurityscanner + default_version: v1 + - name: google-cloud-workflows + version: 1.21.0 + apis: + - path: google/cloud/workflows/v1 + - path: google/cloud/workflows/executions/v1 + - path: google/cloud/workflows/executions/v1beta + - path: google/cloud/workflows/v1beta + description_override: Orchestrate and automate Google Cloud and HTTP-based API services with serverless workflows. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/cloud/workflows/executions/v1: + - transport=grpc + google/cloud/workflows/executions/v1beta: + - transport=grpc + google/cloud/workflows/v1: + - transport=grpc+rest + google/cloud/workflows/v1beta: + - transport=grpc+rest + name_pretty_override: Cloud Workflows + metadata_name_override: workflows + default_version: v1 + - name: google-cloud-workloadmanager + version: 0.2.0 + apis: + - path: google/cloud/workloadmanager/v1 + description_override: |- + Workload Manager is a service that provides tooling for enterprise + workloads to automate the deployment and validation of your workloads + against best practices and recommendations. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Workload Manager API + default_version: v1 + - name: google-cloud-workstations + version: 0.8.0 + apis: + - path: google/cloud/workstations/v1 + - path: google/cloud/workstations/v1beta + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + product_documentation_override: https://cloud.google.com/workstations/ + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: workstations + default_version: v1 + - name: google-crc32c + version: 1.8.0 + python: + library_type: OTHER + name_pretty_override: A python wrapper of the C library 'Google CRC32C' + client_documentation_override: https://github.com/googleapis/python-crc32c + issue_tracker_override: https://github.com/googleapis/python-crc32c/issues + skip_readme_copy: true + - name: google-geo-type + version: 0.6.0 + apis: + - path: google/geo/type + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/type/test_type.py + python: + library_type: OTHER + name_pretty_override: Geo Type Protos + product_documentation_override: https://mapsplatform.google.com/maps-products + api_id_override: type.googleapis.com + issue_tracker_override: 
https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: geotype + default_version: apiVersion + - name: google-maps-addressvalidation + version: 0.6.0 + apis: + - path: google/maps/addressvalidation/v1 + description_override: Address Validation lets you validate and correct address inputs with Places data powered by Google Maps Platform. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/addressvalidation/v1: + - proto-plus-deps=google.geo.type + name_pretty_override: Address Validation API + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: addressvalidation + default_version: v1 + - name: google-maps-areainsights + version: 0.4.0 + apis: + - path: google/maps/areainsights/v1 + description_override: 'Places Insights API. ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Places Insights API + api_shortname_override: areainsights + default_version: v1 + - name: google-maps-fleetengine + version: 0.5.0 + apis: + - path: google/maps/fleetengine/v1 + description_override: Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/fleetengine/v1: + - python-gapic-namespace=google.maps + - proto-plus-deps=google.geo.type + name_pretty_override: Local Rides and Deliveries API + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: fleetengine + default_version: v1 + - name: google-maps-fleetengine-delivery + version: 0.5.0 + apis: + - path: google/maps/fleetengine/delivery/v1 + description_override: Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/fleetengine/delivery/v1: + - python-gapic-namespace=google.maps + - python-gapic-name=fleetengine_delivery + - proto-plus-deps=google.geo.type + name_pretty_override: Last Mile Fleet Solution Delivery API + api_shortname_override: fleetengine + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: fleetengine-delivery + default_version: v1 + - name: google-maps-geocode + version: 0.2.0 + apis: + - path: google/maps/geocode/v4 + description_override: |- + Convert addresses into geographic coordinates (geocoding), which you can + use to place markers or position the map. This API also allows you to + convert geographic coordinates into an address (reverse geocoding). 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/geocode/v4: + - proto-plus-deps=google.geo.type + name_pretty_override: Geocoding API + api_shortname_override: geocoding-backend + client_documentation_override: https://cloud.google.com/python/docs/reference/google-maps-geocode/latest + default_version: v4 + - name: google-maps-mapsplatformdatasets + version: 0.7.0 + apis: + - path: google/maps/mapsplatformdatasets/v1 + description_override: Maps Platform Datasets API + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Maps Platform Datasets API + product_documentation_override: https://developers.google.com/maps + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: mapsplatformdatasets + default_version: v1 + - name: google-maps-navconnect + version: 0.1.0 + apis: + - path: google/maps/navconnect/v1 + description_override: Navigation Connect API. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Navigation Connect API + client_documentation_override: https://cloud.google.com/python/docs/reference/google-maps-navconnect/latest + default_version: v1 + - name: google-maps-places + version: 0.8.0 + apis: + - path: google/maps/places/v1 + description_override: The Places API allows developers to access a variety of search and retrieval endpoints for a Place. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/places/v1: + - autogen-snippets=False + - proto-plus-deps=google.geo.type + name_pretty_override: Places API + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: places + default_version: v1 + - name: google-maps-routeoptimization + version: 0.4.0 + apis: + - path: google/maps/routeoptimization/v1 + description_override: The Route Optimization API assigns tasks and routes to a vehicle fleet, optimizing against the objectives and constraints that you supply for your transportation goals. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Route Optimization API + default_version: v1 + - name: google-maps-routing + version: 0.10.0 + apis: + - path: google/maps/routing/v2 + description_override: Help your users find the ideal way to get from A to Z with comprehensive data and real-time traffic. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/maps/routing/v2: + - proto-plus-deps=google.geo.type + name_pretty_override: Google Maps Routing + api_shortname_override: routing + api_id_override: routing.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + metadata_name_override: routing + default_version: v2 + - name: google-maps-solar + version: 0.5.0 + apis: + - path: google/maps/solar/v1 + description_override: The Google Maps Platform Solar API is a service focused on helping accelerate solar and energy system installations. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + name_pretty_override: Solar API + default_version: v1 + - name: google-resumable-media + version: 2.8.2 + python: + library_type: CORE + name_pretty_override: Google Resumable Media + client_documentation_override: https://cloud.google.com/python/docs/reference/google-resumable-media/latest + - name: google-shopping-css + version: 0.5.0 + apis: + - path: google/shopping/css/v1 + description_override: Programmatically manage your Comparison Shopping Service (CSS) account data at scale. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/css/v1: + - proto-plus-deps=google.shopping.type + name_pretty_override: CSS API + default_version: v1 + - name: google-shopping-merchant-accounts + version: 1.5.0 + apis: + - path: google/shopping/merchant/accounts/v1 + - path: google/shopping/merchant/accounts/v1beta + description_override: Programmatically manage your Merchant Center accounts. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/accounts/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_accounts + - python-gapic-namespace=google.shopping + google/shopping/merchant/accounts/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_accounts + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant API + api_shortname_override: accounts + api_id_override: accounts.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-conversions + version: 1.3.0 + apis: + - path: google/shopping/merchant/conversions/v1 + - path: google/shopping/merchant/conversions/v1beta + description_override: Programmatically manage your Merchant Center accounts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/conversions/v1: + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_conversions + google/shopping/merchant/conversions/v1beta: + - python-gapic-name=merchant_conversions + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant API + api_shortname_override: conversions + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-datasources + version: 1.4.0 + apis: + - path: google/shopping/merchant/datasources/v1 + - path: google/shopping/merchant/datasources/v1beta + description_override: Programmatically manage your Merchant Center accounts. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/datasources/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_datasources + - python-gapic-namespace=google.shopping + google/shopping/merchant/datasources/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_datasources + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant API + api_shortname_override: datasources + api_id_override: datasources.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-inventories + version: 1.3.0 + apis: + - path: google/shopping/merchant/inventories/v1 + - path: google/shopping/merchant/inventories/v1beta + description_override: Programmatically manage your Merchant Center accounts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/inventories/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_inventories + - python-gapic-namespace=google.shopping + google/shopping/merchant/inventories/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_inventories + name_pretty_override: Merchant Inventories API + api_shortname_override: inventories + api_id_override: inventories.googleapis.com + default_version: v1 + - name: google-shopping-merchant-issueresolution + version: 1.3.0 + apis: + - path: google/shopping/merchant/issueresolution/v1 + - path: google/shopping/merchant/issueresolution/v1beta + description_override: 'Programmatically manage your Merchant Center Accounts. ' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/issueresolution/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_issueresolution + - python-gapic-namespace=google.shopping + google/shopping/merchant/issueresolution/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_issueresolution + name_pretty_override: Merchant API + api_shortname_override: issueresolution + api_id_override: issueresolution.googleapis.com + default_version: v1 + - name: google-shopping-merchant-lfp + version: 1.3.0 + apis: + - path: google/shopping/merchant/lfp/v1 + - path: google/shopping/merchant/lfp/v1beta + description_override: Programmatically manage your Merchant Center accounts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/lfp/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_lfp + - python-gapic-namespace=google.shopping + google/shopping/merchant/lfp/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_lfp + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant API + api_shortname_override: lfp + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-notifications + version: 1.3.0 + apis: + - path: google/shopping/merchant/notifications/v1 + - path: google/shopping/merchant/notifications/v1beta + description_override: Programmatically manage your Merchant Center accounts. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/notifications/v1: + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_notifications + google/shopping/merchant/notifications/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_notifications + name_pretty_override: Merchant API + api_shortname_override: notifications + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-ordertracking + version: 1.3.0 + apis: + - path: google/shopping/merchant/ordertracking/v1 + - path: google/shopping/merchant/ordertracking/v1beta + description_override: 'Programmatically manage your Merchant Center Accounts. 
' + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/ordertracking/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_ordertracking + - python-gapic-namespace=google.shopping + google/shopping/merchant/ordertracking/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_ordertracking + name_pretty_override: Merchant API + api_shortname_override: ordertracking + api_id_override: ordertracking.googleapis.com + default_version: v1 + - name: google-shopping-merchant-products + version: 1.5.0 + apis: + - path: google/shopping/merchant/products/v1 + - path: google/shopping/merchant/products/v1beta + description_override: Programmatically manage your Merchant Center accounts. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/products/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_products + - python-gapic-namespace=google.shopping + google/shopping/merchant/products/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_products + name_pretty_override: Merchant API + api_shortname_override: products + api_id_override: products.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-productstudio + version: 0.4.0 + apis: + - path: google/shopping/merchant/productstudio/v1alpha + description_override: Programmatically manage your Merchant Center accounts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/productstudio/v1alpha: + - python-gapic-name=merchant_productstudio + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant ProductStudio API + api_shortname_override: productstudio + api_id_override: productstudio.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1alpha + - name: google-shopping-merchant-promotions + version: 1.3.0 + apis: + - path: google/shopping/merchant/promotions/v1 + - path: google/shopping/merchant/promotions/v1beta + description_override: Programmatically manage your Merchant Center accounts. + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/promotions/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_promotions + - python-gapic-namespace=google.shopping + google/shopping/merchant/promotions/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_promotions + name_pretty_override: Merchant API + api_shortname_override: promotions + api_id_override: promotions.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: v1 + - name: google-shopping-merchant-quota + version: 1.4.0 + apis: + - path: google/shopping/merchant/quota/v1 + - path: google/shopping/merchant/quota/v1beta + description_override: Programmatically manage your Merchant Center accounts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/quota/v1: + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_quota + google/shopping/merchant/quota/v1beta: + - python-gapic-namespace=google.shopping + - python-gapic-name=merchant_quota + name_pretty_override: Shopping Merchant Quota + default_version: v1 + - name: google-shopping-merchant-reports + version: 1.3.0 + apis: + - path: google/shopping/merchant/reports/v1 + - path: google/shopping/merchant/reports/v1beta + - path: google/shopping/merchant/reports/v1alpha + description_override: Programmatically manage your Merchant Center accounts + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/reports/v1: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_reports + - python-gapic-namespace=google.shopping + google/shopping/merchant/reports/v1alpha: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_reports + - python-gapic-namespace=google.shopping + google/shopping/merchant/reports/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_reports + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant Reports API + api_shortname_override: reports + api_id_override: reports.googleapis.com + default_version: v1 + - name: google-shopping-merchant-reviews + version: 0.5.0 + apis: + - path: google/shopping/merchant/reviews/v1beta + description_override: Programmatically manage your Merchant Center Accounts + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + python: + opt_args_by_api: + google/shopping/merchant/reviews/v1beta: + - proto-plus-deps=google.shopping.type + - python-gapic-name=merchant_reviews + - python-gapic-namespace=google.shopping + name_pretty_override: Merchant Reviews API + api_shortname_override: reviews + api_id_override: reviews.googleapis.com + issue_tracker_override: 
https://github.com/googleapis/google-cloud-python/issues + default_version: v1beta + - name: google-shopping-type + version: 1.4.0 + apis: + - path: google/shopping/type + keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - tests/unit/gapic/type/test_type.py + python: + name_pretty_override: Shopping Type Protos + product_documentation_override: https://developers.google.com/merchant/api + api_shortname_override: type + api_id_override: type.googleapis.com + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: apiVersion + - name: googleapis-common-protos + version: 1.74.0 + apis: + - path: google/api + - path: google/cloud + - path: google/rpc + - path: google/type + - path: google/cloud/location + - path: google/logging/type + - path: google/rpc/context + python: + library_type: CORE + proto_only_apis: + - google/api + - google/cloud + - google/rpc + - google/type + - google/cloud/location + - google/logging/type + - google/rpc/context + name_pretty_override: Google APIs Common Protos + product_documentation_override: https://github.com/googleapis/googleapis/tree/master/google + client_documentation_override: https://github.com/googleapis/google-cloud-python/tree/main/packages/googleapis-common-protos + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + default_version: apiVersion + - name: grafeas + version: 1.22.0 + apis: + - path: grafeas/v1 + description_override: An implementation of the Grafeas API, which stores, and enables querying and retrieval of critical metadata about all of your software artifacts. 
+ keep: + - CHANGELOG.md + - docs/CHANGELOG.md + - grafeas.py + - grafeas/__init__.py + - grafeas/grafeas_v1/types.py + python: + library_type: GAPIC_COMBO + opt_args_by_api: + grafeas/v1: + - python-gapic-namespace=grafeas + - warehouse-package-name=grafeas + name_pretty_override: Grafeas + default_version: v1 + - name: grpc-google-iam-v1 + version: 0.14.4 + apis: + - path: google/iam/v1 + python: + proto_only_apis: + - google/iam/v1 + name_pretty_override: Cloud Identity and Access Management + product_documentation_override: https://cloud.google.com/iam/docs/ + api_shortname_override: iam + api_id_override: iam.googleapis.com + client_documentation_override: https://cloud.google.com/python/docs/reference/grpc-iam/latest + metadata_name_override: grpc-iam + default_version: apiVersion + - name: pandas-gbq + version: 0.34.1 + python: + library_type: INTEGRATION + name_pretty_override: Google BigQuery connector for pandas + product_documentation_override: https://cloud.google.com/bigquery + api_id_override: bigquery.googleapis.com + client_documentation_override: https://googleapis.dev/python/pandas-gbq/latest/ + issue_tracker_override: https://github.com/googleapis/python-bigquery-pandas/issues + skip_readme_copy: true + - name: proto-plus + version: 1.27.2 + python: + library_type: CORE + name_pretty_override: Proto Plus + issue_tracker_override: https://github.com/googleapis/google-cloud-python/issues + skip_readme_copy: true + - name: sqlalchemy-bigquery + version: 1.16.0 + python: + library_type: INTEGRATION + name_pretty_override: SQLAlchemy dialect for BigQuery + api_id_override: bigquery.googleapis.com + client_documentation_override: https://googleapis.dev/python/sqlalchemy-bigquery/latest/index.html + - name: sqlalchemy-spanner + version: 1.17.3 + python: + library_type: INTEGRATION + name_pretty_override: Spanner dialect for SQLAlchemy + product_documentation_override: https://cloud.google.com/spanner/docs + api_shortname_override: sqlalchemy-spanner + 
client_documentation_override: https://github.com/googleapis/python-spanner-sqlalchemy + issue_tracker_override: https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open + skip_readme_copy: true diff --git a/packages/bigframes/.repo-metadata.json b/packages/bigframes/.repo-metadata.json index c56e1245b5e9..4a1c7ed478e3 100644 --- a/packages/bigframes/.repo-metadata.json +++ b/packages/bigframes/.repo-metadata.json @@ -1,16 +1,14 @@ { - "name": "bigframes", - "name_pretty": "A unified Python API in BigQuery", - "product_documentation": "https://cloud.google.com/bigquery", + "api_id": "bigquery.googleapis.com", + "api_shortname": "bigquery", "client_documentation": "https://cloud.google.com/python/docs/reference/bigframes/latest", + "distribution_name": "bigframes", "issue_tracker": "https://github.com/googleapis/python-bigquery-dataframes/issues", - "release_level": "preview", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "bigframes", - "api_id": "bigquery.googleapis.com", - "default_version": "", - "codeowner_team": "@googleapis/bigquery-dataframe-team", - "api_shortname": "bigquery" -} + "name": "bigframes", + "name_pretty": "A unified Python API in BigQuery", + "product_documentation": "https://cloud.google.com/bigquery", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/bigframes/docs/README.rst b/packages/bigframes/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/bigframes/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/bigframes/docs/README.rst b/packages/bigframes/docs/README.rst new file mode 100644 index 000000000000..a3aef5380bb1 --- /dev/null +++ b/packages/bigframes/docs/README.rst @@ -0,0 +1,94 @@ +BigQuery DataFrames (BigFrames) +=============================== + + +|GA| |pypi| 
|versions| + +BigQuery DataFrames (also known as BigFrames) provides a Pythonic DataFrame +and machine learning (ML) API powered by the BigQuery engine. It provides modules +for many use cases, including: + +* `bigframes.pandas `_ + is a pandas API for analytics. Many workloads can be + migrated from pandas to bigframes by just changing a few imports. +* `bigframes.ml `_ + is a scikit-learn-like API for ML. +* `bigframes.bigquery.ai `_ + are a collection of powerful AI methods, powered by Gemini. + +BigQuery DataFrames is an `open-source package `_. + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/bigframes.svg + :target: https://pypi.org/project/bigframes/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/bigframes.svg + :target: https://pypi.org/project/bigframes/ + +Getting started with BigQuery DataFrames +---------------------------------------- + +The easiest way to get started is to try the +`BigFrames quickstart `_ +in a `notebook in BigQuery Studio `_. + +To use BigFrames in your local development environment, + +1. Run ``pip install --upgrade bigframes`` to install the latest version. + +2. Set up `Application default credentials `_ + for your local development environment. + +3. Create a `GCP project with the BigQuery API enabled `_. + +4. Use the ``bigframes`` package to query data. + +.. code-block:: python + + import bigframes.pandas as bpd + + bpd.options.bigquery.project = your_gcp_project_id # Optional in BQ Studio. + bpd.options.bigquery.ordering_mode = "partial" # Recommended for performance. 
+ df = bpd.read_gbq("bigquery-public-data.usa_names.usa_1910_2013") + print( + df.groupby("name") + .agg({"number": "sum"}) + .sort_values("number", ascending=False) + .head(10) + .to_pandas() + ) + +Documentation +------------- + +To learn more about BigQuery DataFrames, visit these pages: + +* `Introduction to BigQuery DataFrames (BigFrames) `_ +* `Sample notebooks `_ +* `API reference `_ +* `Source code (GitHub) `_ + +License +------- + +BigQuery DataFrames is distributed with the `Apache-2.0 license +`_. + +It also contains code derived from the following third-party packages: + +* `Ibis `_ +* `pandas `_ +* `Python `_ +* `scikit-learn `_ +* `XGBoost `_ +* `SQLGlot `_ + +For details, see the `third_party +`_ +directory. + + +Contact Us +---------- + +For further help or to provide feedback, you can email us at `bigframes-feedback@google.com `_. diff --git a/packages/bigframes/scripts/conftest.py b/packages/bigframes/scripts/conftest.py deleted file mode 100644 index 0d55bd4b478f..000000000000 --- a/packages/bigframes/scripts/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -import sys -from pathlib import Path - -# inserts scripts into path so that tests can import -project_root = Path(__file__).parent.parent -scripts_dir = project_root / "scripts" - -sys.path.insert(0, str(scripts_dir)) diff --git a/packages/bigframes/scripts/create_gcs.py b/packages/bigframes/scripts/create_gcs.py deleted file mode 100644 index bdb8a23ddc92..000000000000 --- a/packages/bigframes/scripts/create_gcs.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This script create the bigtable resources required for -# bigframes.streaming testing if they don't already exist - -import os -import sys -from pathlib import Path - -import google.cloud.exceptions as exceptions -import google.cloud.storage as gcs -from google.cloud.storage import transfer_manager - -PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") - -if not PROJECT_ID: - print( - "Please set GOOGLE_CLOUD_PROJECT environment variable before running.", - file=sys.stderr, - ) - sys.exit(1) - - -def create_bucket(client: gcs.Client) -> gcs.Bucket: - bucket_name = "bigframes_blob_test" - - print(f"Creating bucket: {bucket_name}") - try: - bucket = client.create_bucket(bucket_name) - print(f"Bucket {bucket_name} created. ") - - except exceptions.Conflict: - print(f"Bucket {bucket_name} already exists.") - bucket = client.bucket(bucket_name) - - return bucket - - -def upload_data(bucket: gcs.Bucket): - # from https://cloud.google.com/storage/docs/samples/storage-transfer-manager-upload-directory - source_directory = "scripts/data/" - workers = 8 - - # First, recursively get all files in `directory` as Path objects. - directory_as_path_obj = Path(source_directory) - paths = directory_as_path_obj.rglob("*") - - # Filter so the list only includes files, not directories themselves. - file_paths = [path for path in paths if path.is_file()] - - # These paths are relative to the current working directory. 
Next, make them - # relative to `directory` - relative_paths = [path.relative_to(source_directory) for path in file_paths] - - # Finally, convert them all to strings. - string_paths = [str(path) for path in relative_paths] - - print("Found {} files.".format(len(string_paths))) - - # Start the upload. - results = transfer_manager.upload_many_from_filenames( - bucket, string_paths, source_directory=source_directory, max_workers=workers - ) - - for name, result in zip(string_paths, results): - # The results list is either `None` or an exception for each filename in - # the input list, in order. - - if isinstance(result, Exception): - print("Failed to upload {} due to exception: {}".format(name, result)) - else: - print("Uploaded {} to {}.".format(name, bucket.name)) - - -def main(): - client = gcs.Client(project=PROJECT_ID) - - bucket = create_bucket(client) - - upload_data(bucket) - - -if __name__ == "__main__": - main() diff --git a/packages/bigframes/scripts/create_load_test_tables.py b/packages/bigframes/scripts/create_load_test_tables.py deleted file mode 100644 index d94a33aa5cc9..000000000000 --- a/packages/bigframes/scripts/create_load_test_tables.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import math -import os -import pathlib -import sys - -import google.cloud.bigquery as bigquery - -REPO_ROOT = pathlib.Path(__file__).parent.parent - -PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") - -if not PROJECT_ID: - print( - "Please set GOOGLE_CLOUD_PROJECT environment variable before running.", - file=sys.stderr, - ) - sys.exit(1) - -DATASET_ID = f"{PROJECT_ID}.load_testing" -TABLE_ID = f"{DATASET_ID}.scalars" -TABLE_ID_FORMAT = f"{DATASET_ID}.scalars_{{size}}" - -KB_BYTES = 1000 -MB_BYTES = 1000 * KB_BYTES -GB_BYTES = 1000 * MB_BYTES -TB_BYTES = 1000 * GB_BYTES -SIZES = ( - ("1mb", MB_BYTES), - ("10mb", 10 * MB_BYTES), - ("100mb", 100 * MB_BYTES), - ("1gb", GB_BYTES), - ("10gb", 10 * GB_BYTES), - ("100gb", 100 * GB_BYTES), - ("1tb", TB_BYTES), -) -SCHEMA_PATH = REPO_ROOT / "tests" / "data" / "scalars_schema.json" -DATA_PATH = REPO_ROOT / "tests" / "data" / "scalars.jsonl" -BQCLIENT = bigquery.Client() - - -def create_dataset(): - dataset = bigquery.Dataset(DATASET_ID) - BQCLIENT.create_dataset(dataset, exists_ok=True) - - -def load_scalars_table(): - schema = BQCLIENT.schema_from_json(SCHEMA_PATH) - job_config = bigquery.LoadJobConfig() - job_config.schema = schema - job_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE - job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON - - print(f"Creating {TABLE_ID}") - with open(DATA_PATH, "rb") as data_file: - BQCLIENT.load_table_from_file( - data_file, - TABLE_ID, - job_config=job_config, - ).result() - - -def multiply_table(previous_table_id, target_table_id, multiplier): - clauses = [f"SELECT * FROM `{previous_table_id}`"] * multiplier - query = " UNION ALL ".join(clauses) - job_config = bigquery.QueryJobConfig() - job_config.destination = target_table_id - job_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE - print(f"Creating {target_table_id}, {multiplier} x {previous_table_id}") - BQCLIENT.query_and_wait(query, job_config=job_config) - - -def 
create_tables(): - base_table = BQCLIENT.get_table(TABLE_ID) - previous_bytes = base_table.num_bytes - previous_table_id = TABLE_ID - - for table_suffix, target_bytes in SIZES: - # Make sure we exceed the desired bytes by adding to the multiplier. - multiplier = math.ceil(target_bytes / previous_bytes) + 1 - target_table_id = TABLE_ID_FORMAT.format(size=table_suffix) - multiply_table(previous_table_id, target_table_id, multiplier) - - table = BQCLIENT.get_table(target_table_id) - previous_bytes = table.num_bytes - previous_table_id = target_table_id - - -def main(): - create_dataset() - load_scalars_table() - create_tables() - - -if __name__ == "__main__": - main() diff --git a/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables.py b/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables.py deleted file mode 100644 index 727a1e116ac0..000000000000 --- a/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables.py +++ /dev/null @@ -1,541 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations - -import argparse -import base64 -import concurrent.futures -import datetime -import json -import math -import time -from typing import Any, Iterable, MutableSequence, Sequence - -import numpy as np -from google.cloud import bigquery - -# --- Input Data --- -# Generated by querying bigquery-magics usage. See internal issue b/420984164. 
-TABLE_STATS: dict[str, list[float]] = { - "percentile": [9, 19, 29, 39, 49, 59, 69, 79, 89, 99], - "materialized_or_scanned_bytes": [ - 0.0, - 0.0, - 4102.0, - 76901.0, - 351693.0, - 500000.0, - 500000.0, - 1320930.0, - 17486432.0, - 1919625975.0, - ], - "avg_row_bytes": [ - 0.00014346299635435792, - 0.005370969708923197, - 0.3692756731526246, - 4.079344721151818, - 7.5418, - 12.528863516404146, - 22.686258546389798, - 48.69689224091025, - 100.90817356205852, - 2020, - ], - "materialized_mb": [ - 0.0, - 0.0, - 0.004102, - 0.076901, - 0.351693, - 0.5, - 0.5, - 1.32093, - 17.486432, - 1919.625975, - ], -} - -BIGQUERY_DATA_TYPE_SIZES = { - "BOOL": 1, - "DATE": 8, - "FLOAT64": 8, - "INT64": 8, - "DATETIME": 8, - "TIMESTAMP": 8, - "TIME": 8, - "NUMERIC": 16, - # Flexible types. - # JSON base size is its content, BYTES/STRING have 2 byte overhead + content - "JSON": 0, - "BYTES": 2, - "STRING": 2, -} -FIXED_TYPES = [ - "BOOL", - "INT64", - "FLOAT64", - "NUMERIC", - "DATE", - "DATETIME", - "TIMESTAMP", - "TIME", -] -FLEXIBLE_TYPES = ["STRING", "BYTES", "JSON"] - -JSON_CHAR_LIST = list("abcdef") -STRING_CHAR_LIST = list("abcdefghijklmnopqrstuvwxyz0123456789") - -# --- Helper Functions --- - - -def get_bq_schema(target_row_size_bytes: int) -> Sequence[tuple[str, str, int | None]]: - """ - Determines the BigQuery table schema to match the target_row_size_bytes. - Prioritizes fixed-size types for diversity, then uses flexible types. - Returns a list of tuples: (column_name, type_name, length_for_flexible_type). - Length is None for fixed-size types. - """ - schema: MutableSequence[tuple[str, str, int | None]] = [] - current_size = 0 - col_idx = 0 - - for bq_type in FIXED_TYPES: - # For simplicity, we'll allow slight overage if only fixed fields are chosen. 
- if current_size >= target_row_size_bytes: - break - - type_size = BIGQUERY_DATA_TYPE_SIZES[bq_type] - schema.append((f"col_{bq_type.lower()}_{col_idx}", bq_type, None)) - current_size += type_size - col_idx += 1 - - # Use flexible-size types to fill remaining space - - # Attempt to add one of each flexible type if space allows - if current_size < target_row_size_bytes: - remaining_bytes_for_content = target_row_size_bytes - current_size - - # For simplicity, divide the remaing bytes evenly across the flexible - # columns. - target_size = int(math.ceil(remaining_bytes_for_content / len(FLEXIBLE_TYPES))) - - for bq_type in FLEXIBLE_TYPES: - base_cost = BIGQUERY_DATA_TYPE_SIZES[bq_type] - min_content_size = max(0, target_size - base_cost) - - schema.append( - (f"col_{bq_type.lower()}_{col_idx}", bq_type, min_content_size) - ) - current_size += base_cost + min_content_size - col_idx += 1 - - return schema - - -def generate_bool_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - return rng.choice([True, False], size=num_rows) - - -def generate_int64_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - return rng.integers(-(10**18), 10**18, size=num_rows, dtype=np.int64) - - -def generate_float64_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - return rng.random(size=num_rows) * 2 * 10**10 - 10**10 - - -def generate_numeric_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - raw_numerics = rng.random(size=num_rows) * 2 * 10**28 - 10**28 - format_numeric_vectorized = np.vectorize(lambda x: f"{x:.9f}") - return format_numeric_vectorized(raw_numerics) - - -def generate_date_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - start_date_ord = datetime.date(1, 1, 1).toordinal() - max_days = (datetime.date(9999, 12, 
31) - datetime.date(1, 1, 1)).days - day_offsets = rng.integers(0, max_days + 1, size=num_rows) - date_ordinals = start_date_ord + day_offsets - return np.array( - [ - datetime.date.fromordinal(int(ordinal)).isoformat() - for ordinal in date_ordinals - ] - ) - - -def generate_numpy_datetimes(num_rows: int, rng: np.random.Generator) -> np.ndarray: - # Generate seconds from a broad range (e.g., year 1 to 9999) - # Note: Python's datetime.timestamp() might be limited by system's C mktime. - # For broader range with np.datetime64, it's usually fine. - # Let's generate epoch seconds relative to Unix epoch for np.datetime64 compatibility - min_epoch_seconds = int( - datetime.datetime(1, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc).timestamp() - ) - # Max for datetime64[s] is far out, but let's bound it reasonably for BQ. - max_epoch_seconds = int( - datetime.datetime( - 9999, 12, 28, 23, 59, 59, tzinfo=datetime.timezone.utc - ).timestamp() - ) - - epoch_seconds = rng.integers( - min_epoch_seconds, - max_epoch_seconds + 1, - size=num_rows, - dtype=np.int64, - ) - microseconds_offset = rng.integers(0, 1000000, size=num_rows, dtype=np.int64) - - # Create datetime64[s] from epoch seconds and add microseconds as timedelta64[us] - np_timestamps_s = epoch_seconds.astype("datetime64[s]") - np_microseconds_td = microseconds_offset.astype("timedelta64[us]") - return np_timestamps_s + np_microseconds_td - - -def generate_datetime_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - np_datetimes = generate_numpy_datetimes(num_rows, rng) - - # np.datetime_as_string produces 'YYYY-MM-DDTHH:MM:SS.ffffff' - # BQ DATETIME typically uses a space separator: 'YYYY-MM-DD HH:MM:SS.ffffff' - datetime_strings = np.datetime_as_string(np_datetimes, unit="us") - return np.array([s.replace("T", " ") for s in datetime_strings]) - - -def generate_timestamp_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> 
np.ndarray: - np_datetimes = generate_numpy_datetimes(num_rows, rng) - - # Convert to string with UTC timezone indicator - # np.datetime_as_string with timezone='UTC' produces 'YYYY-MM-DDTHH:MM:SS.ffffffZ' - # BigQuery generally accepts this for TIMESTAMP. - return np.datetime_as_string(np_datetimes, unit="us", timezone="UTC") - - -def generate_time_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - hours = rng.integers(0, 24, size=num_rows) - minutes = rng.integers(0, 60, size=num_rows) - seconds = rng.integers(0, 60, size=num_rows) - microseconds = rng.integers(0, 1000000, size=num_rows) - time_list = [ - datetime.time(hours[i], minutes[i], seconds[i], microseconds[i]).isoformat() - for i in range(num_rows) - ] - return np.array(time_list) - - -def generate_json_row(content_length: int, rng: np.random.Generator) -> str: - json_val_len = max(0, content_length - 5) - json_val_chars = rng.choice(JSON_CHAR_LIST, size=json_val_len) - json_obj = {"k": "".join(json_val_chars)} - return json.dumps(json_obj) - - -def generate_json_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - content_length = content_length if content_length is not None else 10 - json_list = [ - generate_json_row(content_length=content_length, rng=rng) - for _ in range(num_rows) - ] - return np.array(json_list) - - -def generate_string_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - content_length = content_length if content_length is not None else 1 - content_length = max(0, content_length) - chars_array = rng.choice(STRING_CHAR_LIST, size=(num_rows, content_length)) - return np.array(["".join(row_chars) for row_chars in chars_array]) - - -def generate_bytes_batch( - num_rows: int, rng: np.random.Generator, content_length: int | None = None -) -> np.ndarray: - content_length = content_length if content_length is not None else 1 - 
content_length = max(0, content_length) - return np.array( - [ - base64.b64encode(rng.bytes(content_length)).decode("utf-8") - for _ in range(num_rows) - ] - ) - - -BIGQUERY_DATA_TYPE_GENERATORS = { - "BOOL": generate_bool_batch, - "DATE": generate_date_batch, - "FLOAT64": generate_float64_batch, - "INT64": generate_int64_batch, - "DATETIME": generate_datetime_batch, - "TIMESTAMP": generate_timestamp_batch, - "TIME": generate_time_batch, - "NUMERIC": generate_numeric_batch, - "JSON": generate_json_batch, - "BYTES": generate_bytes_batch, - "STRING": generate_string_batch, -} - - -def generate_work_items( - table_id: str, - schema: Sequence[tuple[str, str, int | None]], - num_rows: int, - batch_size: int, -) -> Iterable[tuple[str, Sequence[tuple[str, str, int | None]], int]]: - """ - Generates work items of appropriate batch sizes. - """ - if num_rows == 0: - return - - generated_rows_total = 0 - - while generated_rows_total < num_rows: - current_batch_size = min(batch_size, num_rows - generated_rows_total) - if current_batch_size == 0: - break - - yield (table_id, schema, current_batch_size) - generated_rows_total += current_batch_size - - -def generate_batch( - schema: Sequence[tuple[str, str, int | None]], - num_rows: int, - rng: np.random.Generator, -) -> list[dict[str, Any]]: - col_names_ordered = [s[0] for s in schema] - - columns_data_batch = {} - for col_name, bq_type, length in schema: - generate_batch = BIGQUERY_DATA_TYPE_GENERATORS[bq_type] - columns_data_batch[col_name] = generate_batch( - num_rows, rng, content_length=length - ) - - # Turn numpy objects into Python objects. 
- # https://stackoverflow.com/a/32850511/101923 - columns_data_batch_json = {} - for column in columns_data_batch: - columns_data_batch_json[column] = columns_data_batch[column].tolist() - - # Assemble batch of rows - batch_data = [] - for i in range(num_rows): - row = { - col_name: columns_data_batch_json[col_name][i] - for col_name in col_names_ordered - } - batch_data.append(row) - - return batch_data - - -def generate_and_load_batch( - client: bigquery.Client, - table_id: str, - schema_def: Sequence[tuple[str, str, int | None]], - num_rows: int, - rng: np.random.Generator, -): - bq_schema = [] - for col_name, type_name, _ in schema_def: - bq_schema.append(bigquery.SchemaField(col_name, type_name)) - table = bigquery.Table(table_id, schema=bq_schema) - - generated_data_chunk = generate_batch(schema_def, num_rows, rng) - errors = client.insert_rows_json(table, generated_data_chunk) - if errors: - raise ValueError(f"Encountered errors while inserting sub-batch: {errors}") - - -def create_and_load_table( - client: bigquery.Client | None, - project_id: str, - dataset_id: str, - table_name: str, - schema_def: Sequence[tuple[str, str, int | None]], - num_rows: int, - executor: concurrent.futures.Executor, -): - """Creates a BigQuery table and loads data into it by consuming a data generator.""" - - if not client: - print(f"Simulating: Generated schema: {schema_def}") - return - - # BQ client library streaming insert batch size (rows per API call) - # This is different from data_gen_batch_size which is for generating data. - # We can make BQ_LOAD_BATCH_SIZE smaller than data_gen_batch_size if needed. - BQ_LOAD_BATCH_SIZE = 500 - - # Actual BigQuery operations occur here because both project_id and dataset_id are provided - print( - f"Attempting BigQuery operations for table {table_name} in project '{project_id}', dataset '{dataset_id}'." 
- ) - table_id = f"{project_id}.{dataset_id}.{table_name}" - - bq_schema = [] - for col_name, type_name, _ in schema_def: - bq_schema.append(bigquery.SchemaField(col_name, type_name)) - - table = bigquery.Table(table_id, schema=bq_schema) - print(f"(Re)creating table {table_id}...") - table = client.create_table(table, exists_ok=True) - print(f"Table {table_id} created successfully or already exists.") - - # Query in case there's something in the streaming buffer already. - table_rows = next( - iter(client.query_and_wait(f"SELECT COUNT(*) FROM `{table_id}`")) - )[0] - print(f"Table {table_id} has {table_rows} rows.") - num_rows = max(0, num_rows - table_rows) - - if num_rows <= 0: - print(f"No rows to load. Requested {num_rows} rows. Skipping.") - return - - print(f"Starting to load {num_rows} rows into {table_id} in batches...") - - previous_status_time = 0.0 - generated_rows_total = 0 - - for completed_rows in executor.map( - worker_process_item, - generate_work_items( - table_id, - schema_def, - num_rows, - BQ_LOAD_BATCH_SIZE, - ), - ): - generated_rows_total += completed_rows - - current_time = time.monotonic() - if current_time - previous_status_time > 5: - print(f"Wrote {generated_rows_total} out of {num_rows} rows.") - previous_status_time = current_time - - -worker_client: bigquery.Client | None = None -worker_rng: np.random.Generator | None = None - - -def worker_initializer(project_id: str | None): - global worker_client, worker_rng - - # One client per process, since multiprocessing and client connections don't - # play nicely together. 
- if project_id is not None: - worker_client = bigquery.Client(project=project_id) - - worker_rng = np.random.default_rng() - - -def worker_process_item( - work_item: tuple[str, Sequence[tuple[str, str, int | None]], int], -): - global worker_client, worker_rng - - if worker_client is None or worker_rng is None: - raise ValueError("Worker not initialized.") - - table_id, schema_def, num_rows = work_item - generate_and_load_batch(worker_client, table_id, schema_def, num_rows, worker_rng) - return num_rows - - -# --- Main Script Logic --- -def main(): - """Main function to create and populate BigQuery tables.""" - - parser = argparse.ArgumentParser( - description="Generate and load BigQuery benchmark tables." - ) - parser.add_argument( - "-p", - "--project_id", - type=str, - default=None, - help="Google Cloud Project ID. If not provided, script runs in simulation mode.", - ) - parser.add_argument( - "-d", - "--dataset_id", - type=str, - default=None, - help="BigQuery Dataset ID within the project. 
If not provided, script runs in simulation mode.", - ) - args = parser.parse_args() - - num_percentiles = len(TABLE_STATS["percentile"]) - client = None - - if args.project_id and args.dataset_id: - client = bigquery.Client(project=args.project_id) - dataset = bigquery.Dataset(f"{args.project_id}.{args.dataset_id}") - client.create_dataset(dataset, exists_ok=True) - - with concurrent.futures.ProcessPoolExecutor( - initializer=worker_initializer, initargs=(args.project_id,) - ) as executor: - for i in range(num_percentiles): - percentile = TABLE_STATS["percentile"][i] - avg_row_bytes_raw = TABLE_STATS["avg_row_bytes"][i] - table_bytes_raw = TABLE_STATS["materialized_or_scanned_bytes"][i] - - target_table_bytes = max(1, int(math.ceil(table_bytes_raw))) - target_row_bytes = max(1, int(math.ceil(avg_row_bytes_raw))) - num_rows = max(1, int(math.ceil(target_table_bytes / target_row_bytes))) - - table_name = f"percentile_{percentile:02d}" - print(f"\n--- Processing Table: {table_name} ---") - print(f"Target average row bytes (rounded up): {target_row_bytes}") - print(f"Number of rows (rounded up): {num_rows}") - - schema_definition = get_bq_schema(target_row_bytes) - print(f"Generated Schema: {schema_definition}") - - create_and_load_table( - client, - args.project_id or "", - args.dataset_id or "", - table_name, - schema_definition, - num_rows, - executor, - ) - - -if __name__ == "__main__": - main() diff --git a/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables_test.py b/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables_test.py deleted file mode 100644 index 56c9cb2bc568..000000000000 --- a/packages/bigframes/scripts/create_read_gbq_colab_benchmark_tables_test.py +++ /dev/null @@ -1,334 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import annotations - -import base64 -import datetime -import json -import math -import re - -import numpy as np -import pytest - -# Assuming the script to be tested is in the same directory or accessible via PYTHONPATH -from create_read_gbq_colab_benchmark_tables import ( - BIGQUERY_DATA_TYPE_SIZES, - generate_batch, - generate_work_items, - get_bq_schema, -) - - -# Helper function to calculate estimated row size from schema -def _calculate_row_size(schema: list[tuple[str, str, int | None]]) -> int: - """Calculates the estimated byte size of a row based on the schema. - Note: This is a simplified calculation for testing and might not perfectly - match BigQuery's internal storage, especially for complex types or NULLs. 
- """ - size = 0 - for _, bq_type, length in schema: - if bq_type in ["STRING", "BYTES", "JSON"]: - # Base cost (e.g., 2 bytes) + content length - size += BIGQUERY_DATA_TYPE_SIZES[bq_type] + ( - length if length is not None else 0 - ) - elif bq_type in BIGQUERY_DATA_TYPE_SIZES: - size += BIGQUERY_DATA_TYPE_SIZES[bq_type] - else: - raise AssertionError(f"Got unexpected type {bq_type}") - return size - - -# --- Tests for get_bq_schema --- - - -def test_get_bq_schema_zero_bytes(): - assert get_bq_schema(0) == [] - - -def test_get_bq_schema_one_byte(): - schema = get_bq_schema(1) - - assert len(schema) == 1 - assert schema[0][1] == "BOOL" # ('col_bool_fallback_0', 'BOOL', None) or similar - assert _calculate_row_size(schema) == 1 - - -def test_get_bq_schema_exact_fixed_fit(): - # BOOL (1) + INT64 (8) = 9 bytes - target_size = 9 - schema = get_bq_schema(target_size) - - assert len(schema) == 2 - assert schema[0][1] == "BOOL" - assert schema[1][1] == "INT64" - assert _calculate_row_size(schema) == target_size - - -def test_get_bq_schema_needs_flexible_string(): - # Sum of all fixed types: - # BOOL 1, INT64 8, FLOAT64 8, NUMERIC 16, DATE 8, DATETIME 8, TIMESTAMP 8, TIME 8 - # Total = 1+8+8+16+8+8+8+8 = 65 - target_size = 65 + 1 - schema = get_bq_schema(target_size) - - assert _calculate_row_size(schema) == 65 + 2 + 2 + 1 - - string_cols = [s for s in schema if s[1] == "STRING"] - assert len(string_cols) == 1 - assert string_cols[0][2] == 0 - - bytes_cols = [s for s in schema if s[1] == "BYTES"] - assert len(bytes_cols) == 1 - assert bytes_cols[0][2] == 0 - - json_cols = [s for s in schema if s[1] == "JSON"] - assert len(json_cols) == 1 - assert json_cols[0][2] == 1 - - -def test_get_bq_schema_flexible_expansion(): - # Sum of all fixed types: - # BOOL 1, INT64 8, FLOAT64 8, NUMERIC 16, DATE 8, DATETIME 8, TIMESTAMP 8, TIME 8 - # Total = 1+8+8+16+8+8+8+8 = 65 - target_size = 65 + 3 * 5 - schema = get_bq_schema(target_size) - - assert _calculate_row_size(schema) == 
target_size - - string_cols = [s for s in schema if s[1] == "STRING"] - assert len(string_cols) == 1 - assert string_cols[0][2] == 3 - - bytes_cols = [s for s in schema if s[1] == "BYTES"] - assert len(bytes_cols) == 1 - assert bytes_cols[0][2] == 3 - - json_cols = [s for s in schema if s[1] == "JSON"] - assert len(json_cols) == 1 - assert json_cols[0][2] == 5 - - -def test_get_bq_schema_all_fixed_types_possible(): - # Sum of all fixed types: - # BOOL 1, INT64 8, FLOAT64 8, NUMERIC 16, DATE 8, DATETIME 8, TIMESTAMP 8, TIME 8 - # Total = 1+8+8+16+8+8+8+8 = 65 - target_size = 65 - schema = get_bq_schema(target_size) - - expected_fixed_types = { - "BOOL", - "INT64", - "FLOAT64", - "NUMERIC", - "DATE", - "DATETIME", - "TIMESTAMP", - "TIME", - } - present_types = {s[1] for s in schema} - - assert expected_fixed_types.issubset(present_types) - - # Check if the size is close to target. - # All fixed (65) - calculated_size = _calculate_row_size(schema) - assert calculated_size == target_size - - -def test_get_bq_schema_uniqueness_of_column_names(): - target_size = 100 # A size that generates multiple columns - schema = get_bq_schema(target_size) - - column_names = [s[0] for s in schema] - assert len(column_names) == len(set(column_names)) - - -# --- Tests for generate_work_items --- - - -def test_generate_work_items_zero_rows(): - schema = [("col_int", "INT64", None)] - data_generator = generate_work_items( - "some_table", schema, num_rows=0, batch_size=10 - ) - - # Expect the generator to be exhausted - with pytest.raises(StopIteration): - next(data_generator) - - -def test_generate_work_items_basic_schema_and_batching(): - schema = [("id", "INT64", None), ("is_active", "BOOL", None)] - num_rows = 25 - batch_size = 10 - - generated_rows_count = 0 - batch_count = 0 - for work_item in generate_work_items("some_table", schema, num_rows, batch_size): - table_id, schema_def, num_rows_in_batch = work_item - assert table_id == "some_table" - assert schema_def == schema - assert 
num_rows_in_batch <= num_rows - assert num_rows_in_batch <= batch_size - batch_count += 1 - generated_rows_count += num_rows_in_batch - - assert generated_rows_count == num_rows - assert batch_count == math.ceil(num_rows / batch_size) # 25/10 = 2.5 -> 3 batches - - -def test_generate_work_items_batch_size_larger_than_num_rows(): - schema = [("value", "FLOAT64", None)] - num_rows = 5 - batch_size = 100 - - generated_rows_count = 0 - batch_count = 0 - for work_item in generate_work_items("some_table", schema, num_rows, batch_size): - table_id, schema_def, num_rows_in_batch = work_item - assert table_id == "some_table" - assert schema_def == schema - assert num_rows_in_batch == num_rows # Should be one batch with all rows - batch_count += 1 - generated_rows_count += num_rows_in_batch - - assert generated_rows_count == num_rows - assert batch_count == 1 - - -def test_generate_work_items_all_datatypes(rng): - schema = [ - ("c_bool", "BOOL", None), - ("c_int64", "INT64", None), - ("c_float64", "FLOAT64", None), - ("c_numeric", "NUMERIC", None), - ("c_date", "DATE", None), - ("c_datetime", "DATETIME", None), - ("c_timestamp", "TIMESTAMP", None), - ("c_time", "TIME", None), - ("c_string", "STRING", 10), - ("c_bytes", "BYTES", 5), - ("c_json", "JSON", 20), # Length for JSON is content hint - ] - num_rows = 3 - batch_size = 2 # To test multiple batches - - total_rows_processed = 0 - for work_item in generate_work_items("some_table", schema, num_rows, batch_size): - table_id, schema_def, num_rows_in_batch = work_item - assert table_id == "some_table" - assert schema_def == schema - assert num_rows_in_batch <= batch_size - assert num_rows_in_batch <= num_rows - - total_rows_processed += num_rows_in_batch - - assert total_rows_processed == num_rows - - -# --- Pytest Fixture for RNG --- -@pytest.fixture -def rng(): - return np.random.default_rng(seed=42) - - -def test_generate_batch_basic_schema(rng): - schema = [("id", "INT64", None), ("is_active", "BOOL", None)] - batch = 
generate_batch(schema, 5, rng) - - assert len(batch) == 5 - - for row in batch: - assert isinstance(row, dict) - assert "id" in row - assert "is_active" in row - assert isinstance(row["id"], int) - assert isinstance(row["is_active"], bool) - - -def test_generate_batch_all_datatypes(rng): - schema = [ - ("c_bool", "BOOL", None), - ("c_int64", "INT64", None), - ("c_float64", "FLOAT64", None), - ("c_numeric", "NUMERIC", None), - ("c_date", "DATE", None), - ("c_datetime", "DATETIME", None), - ("c_timestamp", "TIMESTAMP", None), - ("c_time", "TIME", None), - ("c_string", "STRING", 10), - ("c_bytes", "BYTES", 5), - ("c_json", "JSON", 20), # Length for JSON is content hint - ] - num_rows = 3 - - date_pattern = re.compile(r"^\d{4}-\d{2}-\d{2}$") - time_pattern = re.compile(r"^\d{2}:\d{2}:\d{2}(\.\d{1,6})?$") - # BQ DATETIME: YYYY-MM-DD HH:MM:SS.ffffff - datetime_pattern = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(\.\d{1,6})?$") - # BQ TIMESTAMP (UTC 'Z'): YYYY-MM-DDTHH:MM:SS.ffffffZ - timestamp_pattern = re.compile( - r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,6})?Z$" - ) - numeric_pattern = re.compile(r"^-?\d+\.\d{9}$") - - batch = generate_batch(schema, num_rows, rng) - assert len(batch) == num_rows - - for row in batch: - assert isinstance(row["c_bool"], bool) - assert isinstance(row["c_int64"], int) - assert isinstance(row["c_float64"], float) - - assert isinstance(row["c_numeric"], str) - assert numeric_pattern.match(row["c_numeric"]) - - assert isinstance(row["c_date"], str) - assert date_pattern.match(row["c_date"]) - datetime.date.fromisoformat(row["c_date"]) # Check parsable - - assert isinstance(row["c_datetime"], str) - assert datetime_pattern.match(row["c_datetime"]) - datetime.datetime.fromisoformat(row["c_datetime"]) # Check parsable - - assert isinstance(row["c_timestamp"], str) - assert timestamp_pattern.match(row["c_timestamp"]) - # datetime.fromisoformat can parse 'Z' if Python >= 3.11, or needs replace('Z', '+00:00') - dt_obj = 
datetime.datetime.fromisoformat( - row["c_timestamp"].replace("Z", "+00:00") - ) - assert dt_obj.tzinfo == datetime.timezone.utc - - assert isinstance(row["c_time"], str) - assert time_pattern.match(row["c_time"]) - datetime.time.fromisoformat(row["c_time"]) # Check parsable - - assert isinstance(row["c_string"], str) - assert len(row["c_string"]) == 10 - - c_bytes = base64.b64decode(row["c_bytes"]) - assert isinstance(c_bytes, bytes) - assert len(c_bytes) == 5 - - assert isinstance(row["c_json"], str) - try: - json.loads(row["c_json"]) # Check if it's valid JSON - except json.JSONDecodeError: - pytest.fail(f"Invalid JSON string generated: {row['c_json']}") - # Note: Exact length check for JSON is hard due to content variability and escaping. - # The 'length' parameter for JSON in schema is a hint for content size. - # We are primarily testing that it's valid JSON. diff --git a/packages/bigframes/scripts/create_test_model_vertex.py b/packages/bigframes/scripts/create_test_model_vertex.py deleted file mode 100644 index 946e54773e63..000000000000 --- a/packages/bigframes/scripts/create_test_model_vertex.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import sys - -import bigframes.ml.linear_model -import bigframes.pandas - - -def create_vertex_model(vertex_model_name): - df = bigframes.pandas.read_gbq("bigquery-public-data.ml_datasets.penguins") - - # filter down to the data we want to analyze - adelie_data = df[df.species == "Adelie Penguin (Pygoscelis adeliae)"] - - # drop the columns we don't care about - adelie_data = adelie_data.drop(columns=["species"]) - - # drop rows with nulls to get our training data - training_data = adelie_data.dropna() - - feature_columns = training_data["culmen_length_mm"] - label_columns = training_data[["body_mass_g"]] - - # create model - model = bigframes.ml.linear_model.LinearRegression() - model.fit(feature_columns, label_columns) - - # register to Vertex Registry - model.register(vertex_model_name) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Get top APIs for which there are no code samples in the docstring." - ) - parser.add_argument( - "-m", - "--model-name", - type=str, - required=True, - action="store", - help="Name of the model in Vertex.", - ) - parser.add_argument( - "-p", - "--project-id", - type=str, - required=False, - action="store", - help="Project id in which the model should be created. 
" - "By default, a project will be resolved as per https://cloud.google.com/python/docs/reference/google-cloud-core/latest/config#overview.", - ) - - args = parser.parse_args(sys.argv[1:]) - if args.project_id: - bigframes.pandas.options.bigquery.project = args.project_id - - create_vertex_model(args.model_name) diff --git a/packages/bigframes/scripts/data/audio/audio_LJ001-0010.wav b/packages/bigframes/scripts/data/audio/audio_LJ001-0010.wav deleted file mode 100644 index 01a2e68829a5..000000000000 Binary files a/packages/bigframes/scripts/data/audio/audio_LJ001-0010.wav and /dev/null differ diff --git a/packages/bigframes/scripts/data/images/img0.jpg b/packages/bigframes/scripts/data/images/img0.jpg deleted file mode 100644 index 4f9114402b49..000000000000 Binary files a/packages/bigframes/scripts/data/images/img0.jpg and /dev/null differ diff --git a/packages/bigframes/scripts/data/images/img1.jpg b/packages/bigframes/scripts/data/images/img1.jpg deleted file mode 100644 index 15c881bd1afd..000000000000 Binary files a/packages/bigframes/scripts/data/images/img1.jpg and /dev/null differ diff --git a/packages/bigframes/scripts/data/images_exif/test_image_exif.jpg b/packages/bigframes/scripts/data/images_exif/test_image_exif.jpg deleted file mode 100644 index fdfdaf9ad080..000000000000 Binary files a/packages/bigframes/scripts/data/images_exif/test_image_exif.jpg and /dev/null differ diff --git a/packages/bigframes/scripts/data/pdfs/pdfs_sample-local-pdf.pdf b/packages/bigframes/scripts/data/pdfs/pdfs_sample-local-pdf.pdf deleted file mode 100644 index d162cd6877e4..000000000000 Binary files a/packages/bigframes/scripts/data/pdfs/pdfs_sample-local-pdf.pdf and /dev/null differ diff --git a/packages/bigframes/scripts/data/pdfs/test-protected.pdf b/packages/bigframes/scripts/data/pdfs/test-protected.pdf deleted file mode 100644 index 0d8cd28baa46..000000000000 Binary files a/packages/bigframes/scripts/data/pdfs/test-protected.pdf and /dev/null differ diff --git 
a/packages/bigframes/scripts/dev-utils/tpcds_upload_helper.py b/packages/bigframes/scripts/dev-utils/tpcds_upload_helper.py deleted file mode 100644 index dec5b39768f0..000000000000 --- a/packages/bigframes/scripts/dev-utils/tpcds_upload_helper.py +++ /dev/null @@ -1,596 +0,0 @@ -import argparse -import csv -import os -import sys - -import google.api_core.exceptions -from google.cloud import bigquery - - -def preprocess_csv(input_file_path, output_file_path): - try: - with ( - open(input_file_path, mode="r", newline="", encoding="utf-8") as infile, - open(output_file_path, mode="w", newline="", encoding="utf-8") as outfile, - ): - reader = csv.reader(infile, delimiter="|") - writer = csv.writer(outfile, delimiter="|") - - for row in reader: - writer.writerow(row[:-1]) - except Exception as e: - print(f"An error occurred: {e}") - - -def get_schema(table_name): - schema = { - "customer_address": [ - bigquery.SchemaField("ca_address_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ca_address_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("ca_street_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_street_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_street_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_suite_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_county", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("ca_gmt_offset", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ca_location_type", "STRING", mode="NULLABLE"), - ], - "customer_demographics": [ - bigquery.SchemaField("cd_demo_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cd_gender", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cd_marital_status", "STRING", 
mode="NULLABLE"), - bigquery.SchemaField("cd_education_status", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cd_purchase_estimate", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cd_credit_rating", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cd_dep_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cd_dep_employed_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cd_dep_college_count", "INTEGER", mode="NULLABLE"), - ], - "date_dim": [ - bigquery.SchemaField("d_date_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("d_date_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("d_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("d_month_seq", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_week_seq", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_quarter_seq", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_year", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_dow", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_moy", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_dom", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_qoy", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_fy_year", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_fy_quarter_seq", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_fy_week_seq", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_day_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_quarter_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_holiday", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_weekend", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_following_holiday", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_first_dom", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_last_dom", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_same_day_ly", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("d_same_day_lq", "INTEGER", mode="NULLABLE"), - 
bigquery.SchemaField("d_current_day", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_current_week", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_current_month", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_current_quarter", "STRING", mode="NULLABLE"), - bigquery.SchemaField("d_current_year", "STRING", mode="NULLABLE"), - ], - "warehouse": [ - bigquery.SchemaField("w_warehouse_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("w_warehouse_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("w_warehouse_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_warehouse_sq_ft", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("w_street_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_street_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_street_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_suite_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_county", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("w_gmt_offset", "FLOAT", mode="NULLABLE"), - ], - "ship_mode": [ - bigquery.SchemaField("sm_ship_mode_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("sm_ship_mode_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("sm_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("sm_code", "STRING", mode="NULLABLE"), - bigquery.SchemaField("sm_carrier", "STRING", mode="NULLABLE"), - bigquery.SchemaField("sm_contract", "STRING", mode="NULLABLE"), - ], - "time_dim": [ - bigquery.SchemaField("t_time_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("t_time_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("t_time", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("t_hour", "INTEGER", mode="NULLABLE"), - 
bigquery.SchemaField("t_minute", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("t_second", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("t_am_pm", "STRING", mode="NULLABLE"), - bigquery.SchemaField("t_shift", "STRING", mode="NULLABLE"), - bigquery.SchemaField("t_sub_shift", "STRING", mode="NULLABLE"), - bigquery.SchemaField("t_meal_time", "STRING", mode="NULLABLE"), - ], - "reason": [ - bigquery.SchemaField("r_reason_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("r_reason_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("r_reason_desc", "STRING", mode="NULLABLE"), - ], - "income_band": [ - bigquery.SchemaField("ib_income_band_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ib_lower_bound", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ib_upper_bound", "INTEGER", mode="NULLABLE"), - ], - "item": [ - bigquery.SchemaField("i_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("i_item_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("i_rec_start_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("i_rec_end_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("i_item_desc", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_current_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("i_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("i_brand_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("i_brand", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_class_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("i_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_category_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("i_category", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_manufact_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("i_manufact", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_size", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_formulation", "STRING", mode="NULLABLE"), - 
bigquery.SchemaField("i_color", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_units", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_container", "STRING", mode="NULLABLE"), - bigquery.SchemaField("i_manager_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("i_product_name", "STRING", mode="NULLABLE"), - ], - "store": [ - bigquery.SchemaField("s_store_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("s_store_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("s_rec_start_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("s_rec_end_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("s_closed_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_store_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_number_employees", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_floor_space", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_hours", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_market_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_geography_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_market_desc", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_market_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_division_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_division_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_company_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("s_company_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_street_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_street_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_street_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_suite_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_county", "STRING", mode="NULLABLE"), - 
bigquery.SchemaField("s_state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("s_gmt_offset", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("s_tax_precentage", "FLOAT", mode="NULLABLE"), - ], - "call_center": [ - bigquery.SchemaField("cc_call_center_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cc_call_center_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("cc_rec_start_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("cc_rec_end_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("cc_closed_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_open_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_employees", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_sq_ft", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_hours", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_mkt_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_mkt_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_mkt_desc", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_market_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_division", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_division_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_company", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cc_company_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_street_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_street_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_street_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_suite_number", "STRING", mode="NULLABLE"), - 
bigquery.SchemaField("cc_city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_county", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cc_gmt_offset", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cc_tax_percentage", "FLOAT", mode="NULLABLE"), - ], - "customer": [ - bigquery.SchemaField("c_customer_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("c_customer_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("c_current_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_current_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_current_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_first_shipto_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_first_sales_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_salutation", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_first_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_last_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_preferred_cust_flag", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_birth_day", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_birth_month", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_birth_year", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("c_birth_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_login", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_email_address", "STRING", mode="NULLABLE"), - bigquery.SchemaField("c_last_review_date_sk", "STRING", mode="NULLABLE"), - ], - "web_site": [ - bigquery.SchemaField("web_site_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("web_site_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("web_rec_start_date", "DATE", mode="NULLABLE"), - 
bigquery.SchemaField("web_rec_end_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("web_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_open_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("web_close_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("web_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_mkt_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("web_mkt_class", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_mkt_desc", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_market_manager", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_company_id", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("web_company_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_street_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_street_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_street_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_suite_number", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_city", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_county", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_state", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_zip", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_country", "STRING", mode="NULLABLE"), - bigquery.SchemaField("web_gmt_offset", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("web_tax_percentage", "FLOAT", mode="NULLABLE"), - ], - "store_returns": [ - bigquery.SchemaField("sr_returned_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_return_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("sr_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_hdemo_sk", "INTEGER", 
mode="NULLABLE"), - bigquery.SchemaField("sr_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_store_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_reason_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_ticket_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("sr_return_quantity", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("sr_return_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_return_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_return_amt_inc_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_fee", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_return_ship_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_refunded_cash", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_reversed_charge", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_store_credit", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("sr_net_loss", "FLOAT", mode="NULLABLE"), - ], - "household_demographics": [ - bigquery.SchemaField("hd_demo_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("hd_income_band_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("hd_buy_potential", "STRING", mode="NULLABLE"), - bigquery.SchemaField("hd_dep_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("hd_vehicle_count", "INTEGER", mode="NULLABLE"), - ], - "web_page": [ - bigquery.SchemaField("wp_web_page_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("wp_web_page_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("wp_rec_start_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("wp_rec_end_date", "DATE", mode="NULLABLE"), - bigquery.SchemaField("wp_creation_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_access_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_autogen_flag", "STRING", mode="NULLABLE"), - bigquery.SchemaField("wp_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_url", "STRING", 
mode="NULLABLE"), - bigquery.SchemaField("wp_type", "STRING", mode="NULLABLE"), - bigquery.SchemaField("wp_char_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_link_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_image_count", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wp_max_ad_count", "INTEGER", mode="NULLABLE"), - ], - "promotion": [ - bigquery.SchemaField("p_promo_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("p_promo_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("p_start_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("p_end_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("p_item_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("p_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("p_response_target", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("p_promo_name", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_dmail", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_email", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_catalog", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_tv", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_radio", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_press", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_event", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_demo", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_channel_details", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_purpose", "STRING", mode="NULLABLE"), - bigquery.SchemaField("p_discount_active", "STRING", mode="NULLABLE"), - ], - "catalog_page": [ - bigquery.SchemaField("cp_catalog_page_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cp_catalog_page_id", "STRING", mode="REQUIRED"), - bigquery.SchemaField("cp_start_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cp_end_date_sk", "INTEGER", mode="NULLABLE"), - 
bigquery.SchemaField("cp_department", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cp_catalog_number", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cp_catalog_page_number", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cp_description", "STRING", mode="NULLABLE"), - bigquery.SchemaField("cp_type", "STRING", mode="NULLABLE"), - ], - "inventory": [ - bigquery.SchemaField("inv_date_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("inv_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("inv_warehouse_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("inv_quantity_on_hand", "INTEGER", mode="NULLABLE"), - ], - "catalog_returns": [ - bigquery.SchemaField("cr_returned_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_returned_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cr_refunded_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_refunded_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_refunded_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_refunded_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField( - "cr_returning_customer_sk", "INTEGER", mode="NULLABLE" - ), - bigquery.SchemaField("cr_returning_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_returning_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_returning_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_call_center_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_catalog_page_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_ship_mode_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_warehouse_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_reason_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_order_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cr_return_quantity", 
"INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cr_return_amount", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_return_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_return_amt_inc_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_fee", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_return_ship_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_refunded_cash", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_reversed_charge", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_store_credit", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cr_net_loss", "FLOAT", mode="NULLABLE"), - ], - "web_returns": [ - bigquery.SchemaField("wr_returned_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_returned_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("wr_refunded_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_refunded_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_refunded_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_refunded_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField( - "wr_returning_customer_sk", "INTEGER", mode="NULLABLE" - ), - bigquery.SchemaField("wr_returning_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_returning_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_returning_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_web_page_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_reason_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_order_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("wr_return_quantity", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("wr_return_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_return_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_return_amt_inc_tax", "FLOAT", 
mode="NULLABLE"), - bigquery.SchemaField("wr_fee", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_return_ship_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_refunded_cash", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_reversed_charge", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_account_credit", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("wr_net_loss", "FLOAT", mode="NULLABLE"), - ], - "web_sales": [ - bigquery.SchemaField("ws_sold_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_sold_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ws_bill_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_bill_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_bill_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_bill_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_web_page_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_web_site_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_ship_mode_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_warehouse_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_promo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_order_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ws_quantity", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ws_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_sales_price", "FLOAT", 
mode="NULLABLE"), - bigquery.SchemaField("ws_ext_discount_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_ext_sales_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_ext_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_ext_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_ext_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_coupon_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_ext_ship_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_net_paid", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_net_paid_inc_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_net_paid_inc_ship", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_net_paid_inc_ship_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ws_net_profit", "FLOAT", mode="NULLABLE"), - ], - "catalog_sales": [ - bigquery.SchemaField("cs_sold_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_sold_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_bill_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_bill_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_bill_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_bill_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_call_center_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_catalog_page_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_ship_mode_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_warehouse_sk", "INTEGER", mode="NULLABLE"), - 
bigquery.SchemaField("cs_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cs_promo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_order_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("cs_quantity", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("cs_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_sales_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_discount_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_sales_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_coupon_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_ext_ship_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_net_paid", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_net_paid_inc_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_net_paid_inc_ship", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_net_paid_inc_ship_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("cs_net_profit", "FLOAT", mode="NULLABLE"), - ], - "store_sales": [ - bigquery.SchemaField("ss_sold_date_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_sold_time_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_item_sk", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ss_customer_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_cdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_hdemo_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_addr_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_store_sk", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_promo_sk", "INTEGER", mode="NULLABLE"), - 
bigquery.SchemaField("ss_ticket_number", "INTEGER", mode="REQUIRED"), - bigquery.SchemaField("ss_quantity", "INTEGER", mode="NULLABLE"), - bigquery.SchemaField("ss_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_sales_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_ext_discount_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_ext_sales_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_ext_wholesale_cost", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_ext_list_price", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_ext_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_coupon_amt", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_net_paid", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_net_paid_inc_tax", "FLOAT", mode="NULLABLE"), - bigquery.SchemaField("ss_net_profit", "FLOAT", mode="NULLABLE"), - ], - } - - return schema[table_name] - - -def load_data_to_bigquery(table_name, file_paths, client, dataset_ref, temp_file): - """Loads data from a list of files into a BigQuery table.""" - job_config = bigquery.LoadJobConfig( - source_format=bigquery.SourceFormat.CSV, - skip_leading_rows=0, # No header in .dat files - field_delimiter="|", - schema=get_schema(table_name), - ) - - table_ref = dataset_ref.table(table_name) - table = bigquery.Table(table_ref) - client.create_table(table) - - # Load data from each file - for file_path in sorted(file_paths): - preprocess_csv(file_path, temp_file) - with open(temp_file, "rb") as source_file: - job = client.load_table_from_file( - source_file, table_ref, job_config=job_config - ) - job.result() - print( - f"Loaded data from {file_path} into table {project_id}:{dataset_id}.{table_name}" - ) - - -if __name__ == "__main__": - """ - Loads TPC-DS data to BigQuery. 
- - This script loads TPC-DS data generated with source code from - https://www.tpc.org/tpc_documents_current_versions/current_specifications5.asp - into BigQuery. - - Note: If the dataset already exists, the script will exit without uploading data. - - Usage: - python tpcds_upload_helper.py --project_id --dataset_id --ds_path - python tpcds_upload_helper.py -d -p -s - """ - parser = argparse.ArgumentParser(description="Load TPC-DS data to BigQuery") - parser.add_argument( - "--project_id", "-p", required=True, help="Google Cloud project ID" - ) - parser.add_argument("--dataset_id", "-d", required=True, help="BigQuery dataset ID") - parser.add_argument( - "--ds_path", "-s", required=True, help="Path to the TPC-DS data directory" - ) - args = parser.parse_args() - - project_id = args.project_id - dataset_id = args.dataset_id - ds_path = args.ds_path - temp_file = "temp.csv" - - # Initialize BigQuery client - client = bigquery.Client(project=project_id) - dataset_ref = client.dataset(dataset_id) - try: - # Quit if dataset exists - client.get_dataset(dataset_ref) - print(f"Dataset {project_id}:{dataset_id} already exists. 
Skipping.") - sys.exit(1) - except google.api_core.exceptions.NotFound: - # Create the dataset if it doesn't exist - dataset = bigquery.Dataset(dataset_ref) - client.create_dataset(dataset) - print(f"Created dataset {project_id}:{dataset_id}") - - # Iterate through the folders - for table_name in sorted(os.listdir(ds_path)): - table_path = os.path.join(ds_path, table_name) - table_name = table_name.split(".")[0] - if os.path.isdir(table_path): - file_paths = [ - os.path.join(table_path, f) - for f in os.listdir(table_path) - if f.endswith(".dat") - ] - load_data_to_bigquery( - table_name, file_paths, client, dataset_ref, temp_file - ) - - try: - os.remove(temp_file) - print("Removed temporary file: temp.csv") - except FileNotFoundError: - print("Temporary file not found.") diff --git a/packages/bigframes/scripts/get_documentation_coverage.py b/packages/bigframes/scripts/get_documentation_coverage.py deleted file mode 100755 index a6566cafab02..000000000000 --- a/packages/bigframes/scripts/get_documentation_coverage.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import argparse -import importlib -import inspect -import sys -import typing - -import bigframes -import bigframes.pandas as bpd - -PRESENT = "present" -NOT_PRESENT = "not_present" - -CLASSES = [ - bpd.DataFrame, - bpd.Series, - bpd.Index, - bigframes.session.Session, - bigframes.operations.strings.StringMethods, - bigframes.operations.datetimes.DatetimeMethods, - bigframes.operations.structs.StructAccessor, -] - -ML_MODULE_NAMES = [ - "cluster", - "compose", - "decomposition", - "ensemble", - "linear_model", - "metrics", - "model_selection", - "pipeline", - "preprocessing", - "llm", - "forecasting", - "imported", - "remote", -] - -COVERAGE_GENERATORS = { - "documentation": lambda docstr: docstr, - "code samples": lambda docstr: docstr and "**Examples:**" in docstr, -} - -for module_name in ML_MODULE_NAMES: - module = importlib.import_module(f"bigframes.ml.{module_name}") - classes_ = [ - class_ for _, class_ in inspect.getmembers(module, predicate=inspect.isclass) - ] - CLASSES.extend(classes_) - - -def get_coverage_summary( - func: typing.Callable, -) -> typing.Dict[str, typing.Dict[str, typing.List[str]]]: - """Get Summary of the code samples coverage in BigFrames APIs. - - Args: - func (callable): - Function to accept documentation and return whether it satisfies - coverage. - Returns: - Summary: A dictionary of the format - { - class_1: { - "present": [method1, method2, ...], - "not_present": [method3, method4, ...] - }, - class_2: { - ... 
- } - } - """ - summary: typing.Dict[str, typing.Dict[str, typing.List[str]]] = dict() - - for class_ in CLASSES: - class_key = f"{class_.__module__}.{class_.__name__}" - summary[class_key] = {PRESENT: [], NOT_PRESENT: []} - - members = inspect.getmembers(class_) - - for name, obj in members: - # ignore private methods - if name.startswith("_") and not name.startswith("__"): - continue - - # ignore constructor - if name == "__init__": - continue - - def predicate(impl): - return ( - # This includes class methods like `from_dict`, `from_records` - inspect.ismethod(impl) - # This includes instance methods like `dropna`, join` - or inspect.isfunction(impl) - # This includes properties like `shape`, `values` but not - # generic properties like `__weakref__` - or (inspect.isdatadescriptor(impl) and not name.startswith("__")) - ) - - if not predicate(obj): - continue - - # At this point we have a property or a public method - impl = getattr(class_, name) - - docstr = inspect.getdoc(impl) - coverage_present = func(docstr) - key = PRESENT if coverage_present else NOT_PRESENT - summary[class_key][key].append(name) - - return summary - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Get a summary of documentation coverage in BigFrames APIs." - ) - parser.add_argument( - "-c", - "--code-samples", - type=bool, - action=argparse.BooleanOptionalAction, - default=False, - help="Whether to calculate code samples coverage. 
By default the tool" - " calculates the documentation (docstring) coverage.", - ) - parser.add_argument( - "-d", - "--details", - type=bool, - action=argparse.BooleanOptionalAction, - default=False, - help="Whether to print APIs with and without the coverage.", - ) - - args = parser.parse_args(sys.argv[1:]) - - scenario = "code samples" if args.code_samples else "documentation" - summary = get_coverage_summary(COVERAGE_GENERATORS[scenario]) - - total_with_code_samples = 0 - total = 0 - for class_, class_summary in summary.items(): - apis_with_code_samples = len(class_summary[PRESENT]) - total_with_code_samples += apis_with_code_samples - - apis_total = len(class_summary[PRESENT]) + len(class_summary[NOT_PRESENT]) - total += apis_total - - coverage = 100 * apis_with_code_samples / apis_total - print(f"{class_}: {coverage:.1f}% ({apis_with_code_samples}/{apis_total})") - if args.details: - print(f"===> APIs WITH {scenario}: {class_summary[PRESENT]}") - print(f"===> APIs WITHOUT {scenario}: {class_summary[NOT_PRESENT]}") - - coverage = 100 * total_with_code_samples / total - print(f"Total: {coverage:.1f}% ({total_with_code_samples}/{total})") diff --git a/packages/bigframes/scripts/manage_cloud_functions.py b/packages/bigframes/scripts/manage_cloud_functions.py deleted file mode 100644 index ccf588bde7c2..000000000000 --- a/packages/bigframes/scripts/manage_cloud_functions.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import datetime as dt -import sys -import time - -import google.api_core.exceptions -from google.cloud import functions_v2 - -GCF_REGIONS_ALL = [ - "asia-east1", - "asia-east2", - "asia-northeast1", - "asia-northeast2", - "europe-north1", - "europe-southwest1", - "europe-west1", - "europe-west2", - "europe-west4", - "europe-west8", - "europe-west9", - "us-central1", - "us-east1", - "us-east4", - "us-east5", - "us-south1", - "us-west1", - "asia-east2", - "asia-northeast3", - "asia-southeast1", - "asia-southeast2", - "asia-south1", - "asia-south2", - "australia-southeast1", - "australia-southeast2", - "europe-central2", - "europe-west2", - "europe-west3", - "europe-west6", - "northamerica-northeast1", - "northamerica-northeast2", - "southamerica-east1", - "southamerica-west1", - "us-west2", - "us-west3", - "us-west4", -] - -GCF_CLIENT = functions_v2.FunctionServiceClient() - - -def get_bigframes_functions(project, region): - parent = f"projects/{project}/locations/{region}" - functions = GCF_CLIENT.list_functions( - functions_v2.ListFunctionsRequest(parent=parent) - ) - # Filter bigframes created functions - functions = [ - function - for function in functions - if function.name.startswith( - f"projects/{project}/locations/{region}/functions/bigframes-" - ) - ] - - return functions - - -def summarize_gcfs(args): - """Summarize number of bigframes cloud functions in various regions.""" - - region_counts = {} - for region in args.regions: - functions = get_bigframes_functions(args.project_id, region) - functions_count = len(functions) - - # Exclude reporting regions with 0 bigframes GCFs - if functions_count == 0: - continue - - # Count how many GCFs are newer than a day - recent = 0 - for f in functions: - age = dt.datetime.now() - dt.datetime.fromtimestamp( - f.update_time.timestamp() - ) - if age.total_seconds() < args.recency_cutoff: - recent 
+= 1 - - region_counts[region] = (functions_count, recent) - - for item in sorted( - region_counts.items(), key=lambda item: item[1][0], reverse=True - ): - region = item[0] - count, recent = item[1] - print( - "{}: Total={}, Recent={}, Older={}".format( - region, count, recent, count - recent - ) - ) - - -def cleanup_gcfs(args): - """Clean-up bigframes cloud functions in the given regions.""" - max_delete_per_region = args.number - - for region in args.regions: - functions = get_bigframes_functions(args.project_id, region) - count = 0 - for f in functions: - age = dt.datetime.now() - dt.datetime.fromtimestamp( - f.update_time.timestamp() - ) - if age.total_seconds() >= args.recency_cutoff: - try: - count += 1 - GCF_CLIENT.delete_function(name=f.name) - print( - f"[{region}]: deleted [{count}] {f.name} last updated on {f.update_time}" - ) - if count >= max_delete_per_region: - break - # Mostly there is a 60 mutations per minute quota, we want to use 10% of - # that for this clean-up, i.e. 6 mutations per minute. So wait for - # 60/6 = 10 seconds - time.sleep(10) - except google.api_core.exceptions.NotFound: - # Most likely the function was deleted otherwise - pass - except google.api_core.exceptions.ResourceExhausted: - # Stop deleting in this region for now - print( - f"Failed to delete function in region {region} due to quota exhaustion. Pausing for 2 minutes." - ) - time.sleep(120) - - -def list_str(values): - return [val for val in values.split(",") if val] - - -def get_project_from_environment(): - from google.cloud import bigquery - - return bigquery.Client().project - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Manage cloud functions created to serve bigframes remote functions." - ) - parser.add_argument( - "-p", - "--project-id", - type=str, - required=False, - action="store", - help="GCP project-id. 
If not provided, the project-id resolved by the" - " BigQuery client from the user environment would be used.", - ) - parser.add_argument( - "-r", - "--regions", - type=list_str, - required=False, - default=GCF_REGIONS_ALL, - action="store", - help="Cloud functions region(s). If multiple regions, Specify comma separated (e.g. region1,region2)", - ) - - def hours_to_timedelta(hrs): - return dt.timedelta(hours=int(hrs)).total_seconds() - - parser.add_argument( - "-c", - "--recency-cutoff", - type=hours_to_timedelta, - required=False, - default=hours_to_timedelta("24"), - action="store", - help="Number of hours, cloud functions older than which should be considered stale (worthy of cleanup).", - ) - - subparsers = parser.add_subparsers(title="subcommands", required=True) - parser_summary = subparsers.add_parser( - "summary", - help="BigFrames cloud functions summary.", - description="Show the bigframes cloud functions summary.", - ) - parser_summary.set_defaults(func=summarize_gcfs) - parser_cleanup = subparsers.add_parser( - "cleanup", - help="BigFrames cloud functions clean up.", - description="Delete the stale bigframes cloud functions.", - ) - parser_cleanup.add_argument( - "-n", - "--number", - type=int, - required=False, - default=100, - action="store", - help="Number of stale (more than a day old) cloud functions to clean up.", - ) - parser_cleanup.set_defaults(func=cleanup_gcfs) - - args = parser.parse_args(sys.argv[1:]) - if args.project_id is None: - args.project_id = get_project_from_environment() - if args.project_id is None: - raise ValueError( - "Could not resolve a project. Plese set it via --project-id option." 
- ) - args.func(args) diff --git a/packages/bigframes/scripts/notebooks_fill_params.py b/packages/bigframes/scripts/notebooks_fill_params.py deleted file mode 100644 index e0f7c8d687a4..000000000000 --- a/packages/bigframes/scripts/notebooks_fill_params.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import os -import re -import shutil -import sys - -GOOGLE_CLOUD_PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] - - -def make_backup(notebook_path: str): - shutil.copy( - notebook_path, - f"{notebook_path}.backup", - ) - - -def replace_project(line): - """ - Notebooks contain special colab `param {type:"string"}` - comments, which make it easy for customers to fill in their - own information. - """ - # Make sure we're robust to whitespace differences. 
- cleaned = re.sub(r"\s", "", line) - if cleaned == 'PROJECT_ID=""#@param{type:"string"}': - return f'PROJECT_ID = "{GOOGLE_CLOUD_PROJECT}" # @param {{type:"string"}}\n' - else: - return line - - -def replace_params(notebook_path: str): - with open(notebook_path, "r", encoding="utf-8") as notebook_file: - notebook_json = json.load(notebook_file) - - for cell in notebook_json["cells"]: - lines = cell.get("source", []) - new_lines = [replace_project(line) for line in lines] - cell["source"] = new_lines - - with open(notebook_path, "w", encoding="utf-8") as notebook_file: - json.dump(notebook_json, notebook_file, indent=2, ensure_ascii=False) - - -def main(notebook_paths): - for notebook_path in notebook_paths: - make_backup(notebook_path) - replace_params(notebook_path) - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/packages/bigframes/scripts/notebooks_restore_from_backup.py b/packages/bigframes/scripts/notebooks_restore_from_backup.py deleted file mode 100644 index 4d3e0333e399..000000000000 --- a/packages/bigframes/scripts/notebooks_restore_from_backup.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pathlib -import shutil -import sys - - -def restore_from_backup(notebook_path): - backup_path = pathlib.Path(f"{notebook_path}.backup") - if backup_path.exists(): - shutil.move( - backup_path, - notebook_path, - ) - - -def main(notebook_paths): - for notebook_path in notebook_paths: - restore_from_backup(notebook_path) - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/packages/bigframes/scripts/publish_api_coverage.py b/packages/bigframes/scripts/publish_api_coverage.py deleted file mode 100644 index f94cd7e6d7f6..000000000000 --- a/packages/bigframes/scripts/publish_api_coverage.py +++ /dev/null @@ -1,410 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script runs with each release and publishes information about our pandas -and sklearn API coverage to BigQuery, where it can be used in our dashboards.""" - -import argparse -import inspect -import pathlib -import sys - -import pandas as pd -import pandas.core.groupby -import pandas.core.indexes.accessors -import pandas.core.strings.accessor -import pandas.core.window.rolling - -import bigframes -import bigframes.core.groupby -import bigframes.core.window -import bigframes.operations.datetimes -import bigframes.operations.strings -import bigframes.pandas as bpd - -REPO_ROOT = pathlib.Path(__file__).parent.parent - -BIGFRAMES_OBJECT = { - "pandas": "bigframes.pandas", - "dataframe": "bigframes.pandas.DataFrame", - "dataframegroupby": "bigframes.pandas.api.typing.DataFrameGroupBy", - "index": "bigframes.pandas.Index", - "series": "bigframes.pandas.Series", - "seriesgroupby": "bigframes.pandas.api.typing.SeriesGroupBy", - "datetimemethods": "bigframes.pandas.api.typing.DatetimeMethods", - "stringmethods": "bigframes.pandas.api.typing.StringMethods", - "window": "bigframes.pandas.api.typing.Window", -} - - -PANDAS_TARGETS = [ - ("pandas", pd, bpd), - ("dataframe", pd.DataFrame, bpd.DataFrame), - ( - "dataframegroupby", - pandas.core.groupby.DataFrameGroupBy, - bigframes.core.groupby.DataFrameGroupBy, - ), - ("series", pd.Series, bpd.Series), - ( - "seriesgroupby", - pandas.core.groupby.DataFrameGroupBy, - bigframes.core.groupby.DataFrameGroupBy, - ), - ( - "datetimemethods", - pandas.core.indexes.accessors.CombinedDatetimelikeProperties, - bigframes.operations.datetimes.DatetimeMethods, - ), - ( - "stringmethods", - pandas.core.strings.accessor.StringMethods, - bigframes.operations.strings.StringMethods, - ), - ( - "window", - pandas.core.window.rolling.Rolling, - bigframes.core.window.Window, - ), - ("index", pd.Index, bpd.Index), -] - - -def names_from_signature(signature): - """Extract the names of parameters from signature - - See: 
https://docs.python.org/3/library/inspect.html#inspect.signature - """ - return frozenset({parameter for parameter in signature.parameters}) - - -def calculate_missing_parameters(bigframes_function, target_function): - # Some built-in functions can't be inspected. These raise a ValueError. - try: - bigframes_signature = inspect.signature(bigframes_function) - target_signature = inspect.signature(target_function) - except ValueError: - return {} - - bigframes_params = names_from_signature(bigframes_signature) - target_params = names_from_signature(target_signature) - return target_params - bigframes_params - - -def generate_pandas_api_coverage(): - """Inspect all our pandas objects, and compare with the real pandas objects, to see - which methods we implement. For each, generate a regex that can be used to check if - its present in a notebook""" - header = [ - "api", - "pattern", - "kind", - "is_in_bigframes", - "missing_parameters", - "requires_index", - "requires_ordering", - ] - api_patterns = [] - indexers = ["loc", "iloc", "iat", "ix", "at"] - for name, pandas_obj, bigframes_obj in PANDAS_TARGETS: - for member in dir(pandas_obj): - missing_parameters = "" - - # skip private functions and properties - if member[0] == "_": - continue - - # skip members that are also common python methods - if member in {"append", "tolist", "format", "items", "keys"}: - continue - - if inspect.isfunction(getattr(pandas_obj, member)): - # Function, match .member( - token = f"\\.{member}\\(" - token_type = "function" - - if hasattr(bigframes_obj, member): - bigframes_function = getattr(bigframes_obj, member) - pandas_function = getattr(pandas_obj, member) - missing_parameters = ", ".join( - sorted( - calculate_missing_parameters( - bigframes_function, pandas_function - ) - ) - ) - elif member in indexers: - # Indexer, match .indexer[ - token = f"\\.{member}\\[" - token_type = "indexer" - else: - # Property - token = f"\\.{member}\\b" - token_type = "property" - - is_in_bigframes = 
hasattr(bigframes_obj, member) - requires_index = "" - requires_ordering = "" - - if is_in_bigframes: - attr = getattr(bigframes_obj, member) - - # TODO(b/361101138): Add check/documentation for partial - # support (e.g. with some parameters). - requires_index = ( - "Y" if hasattr(attr, "_validations_requires_index") else "" - ) - requires_ordering = ( - "Y" if hasattr(attr, "_validations_requires_ordering") else "" - ) - - api_patterns.append( - [ - f"{name}.{member}", - token, - token_type, - is_in_bigframes, - missing_parameters, - requires_index, - requires_ordering, - ] - ) - - return pd.DataFrame(api_patterns, columns=header) - - -def generate_sklearn_api_coverage(): - """Explore all SKLearn modules, and for each item contained generate a - regex to detect it being imported, and record whether we implement it""" - - import sklearn # noqa - - sklearn_modules = [ - "sklearn", - "sklearn.model_selection", - "sklearn.preprocessing", - "sklearn.metrics", - "sklearn.linear_model", - "sklearn.ensemble", - "sklearn.tree", - "sklearn.neighbors", - "sklearn.svm", - "sklearn.naive_bayes", - "sklearn.pipeline", - "sklearn.decomposition", - "sklearn.impute", - "sklearn.cluster", - "sklearn.feature_selection", - "sklearn.utils", - "sklearn.compose", - "sklearn.neural_network", - "sklearn.datasets", - "sklearn.base", - "sklearn.manifold", - "sklearn.discriminant_analysis", - "sklearn.experimental", - "sklearn.multiclass", - "sklearn.kernel_ridge", - "sklearn.feature_extraction", - "sklearn.dummy", - "sklearn.mixture", - "sklearn.gaussian_process", - "sklearn.calibration", - "sklearn.multioutput", - "sklearn.inspection", - "sklearn.exceptions", - "sklearn.cross_decomposition", - "sklearn.random_projection", - "sklearn.covariance", - "sklearn.semi_supervised", - "sklearn.isotonic", - "sklearn.kernel_approximation", - ] - - header = ["api", "pattern", "kind", "is_in_bigframes"] - api_patterns = [] - for module in sklearn_modules: - exec(f"import {module}") - members = 
eval(f"dir({module})") - bigframes_has_module = False - bigframes_members = [] - try: - bigframes_module = module.replace("sklearn", "bigframes.ml") - exec(f"import {bigframes_module}") - bigframes_has_module = True - bigframes_members = eval(f"dir({bigframes_module})") - except ImportError: - pass - - api_patterns.append( - [ - module, - f"from {module} import ", - "module", - bigframes_has_module, - ] - ) - for member in members: - # skip private functions and properties - if member[0] == "_": - continue - - api_patterns.append( - [ - f"{module}.{member}", - rf"from {module} import [^\n]*\b{member}\b", - "api", - member in bigframes_members, - ] - ) - - return pd.DataFrame(api_patterns, columns=header) - - -def build_api_coverage_table(bigframes_version: str, release_version: str): - pandas_cov_df = generate_pandas_api_coverage() - pandas_cov_df["module"] = "bigframes" - sklearn_cov_df = generate_sklearn_api_coverage() - sklearn_cov_df["module"] = "bigframes.ml" - combined_df = pd.concat([pandas_cov_df, sklearn_cov_df]) - combined_df["timestamp"] = pd.Timestamp.now() - # BigQuery only supports microsecond precision timestamps. 
- combined_df["timestamp"] = combined_df["timestamp"].astype("datetime64[us]") - combined_df["bigframes_version"] = bigframes_version - combined_df["release_version"] = release_version - combined_df = combined_df.infer_objects().convert_dtypes() - return combined_df - - -def format_api(api_names, is_in_bigframes, api_prefix): - api_names = api_names.str.slice(start=len(f"{api_prefix}.")) - formatted = "" + api_names + "" - bigframes_object = BIGFRAMES_OBJECT.get(api_prefix) - if bigframes_object is None: - return formatted - - linked = ( - '' - + formatted - + "" - ) - return formatted.mask(is_in_bigframes, linked) - - -def generate_api_coverage(df, api_prefix): - dataframe_apis = df.loc[df["api"].str.startswith(f"{api_prefix}.")] - fully_implemented = ( - dataframe_apis["missing_parameters"].str.len() == 0 - ) & dataframe_apis["is_in_bigframes"] - partial_implemented = ( - dataframe_apis["missing_parameters"].str.len() != 0 - ) & dataframe_apis["is_in_bigframes"] - not_implemented = ~dataframe_apis["is_in_bigframes"] - - dataframe_table = pd.DataFrame( - { - "API": format_api( - dataframe_apis["api"], - dataframe_apis["is_in_bigframes"], - api_prefix, - ), - "Implemented": "", - "Requires index": dataframe_apis["requires_index"], - "Requires ordering": dataframe_apis["requires_ordering"], - "Missing parameters": dataframe_apis["missing_parameters"], - } - ) - dataframe_table.loc[fully_implemented, "Implemented"] = "Y" - dataframe_table.loc[partial_implemented, "Implemented"] = "P" - dataframe_table.loc[not_implemented, "Implemented"] = "N" - return dataframe_table - - -def generate_api_coverage_doc(df, api_prefix): - dataframe_table = generate_api_coverage(df, api_prefix) - dataframe_table = dataframe_table.loc[~(dataframe_table["Implemented"] == "N")] - dataframe_table["Implemented"] = dataframe_table["Implemented"].map( - { - "Y": "Y", - "P": "P", - } - ) - - with open( - REPO_ROOT / "docs" / "supported_pandas_apis" / f"bf_{api_prefix}.html", - "w", - ) as 
html_file: - dataframe_table.to_html( - html_file, index=False, header=True, escape=False, border=0, col_space="8em" - ) - - -def generate_api_coverage_docs(df): - for target in PANDAS_TARGETS: - api_prefix = target[0] - generate_api_coverage_doc(df, api_prefix) - - -def print_api_coverage_summary(df, api_prefix): - dataframe_table = generate_api_coverage(df, api_prefix) - - print(api_prefix) - print(dataframe_table[["Implemented", "API"]].groupby(["Implemented"]).count()) - print(f"{api_prefix} APIs: {dataframe_table.shape[0]}\n") - - -def print_api_coverage_summaries(df): - for target in PANDAS_TARGETS: - api_prefix = target[0] - print_api_coverage_summary(df, api_prefix) - - print(f"\nAll APIs: {len(df.index)}") - fully_implemented = (df["missing_parameters"].str.len() == 0) & df[ - "is_in_bigframes" - ] - print(f"Y: {fully_implemented.sum()}") - partial_implemented = (df["missing_parameters"].str.len() != 0) & df[ - "is_in_bigframes" - ] - print(f"P: {partial_implemented.sum()}") - not_implemented = ~df["is_in_bigframes"] - print(f"N: {not_implemented.sum()}") - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("output_type") - parser.add_argument("--bigframes_version", default=bigframes.__version__) - parser.add_argument("--release_version", default="") - parser.add_argument("--bigquery_table_name") - args = parser.parse_args() - df = build_api_coverage_table(args.bigframes_version, args.release_version) - - if args.output_type == "bigquery": - df.to_gbq(args.bigquery_table_name, if_exists="append") - elif args.output_type == "docs": - generate_api_coverage_docs(df) - elif args.output_type == "summary": - print_api_coverage_summaries(df) - else: - print(f"Unexpected output_type {repr(args.output_type)}") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/packages/bigframes/scripts/run_and_publish_benchmark.py b/packages/bigframes/scripts/run_and_publish_benchmark.py deleted file mode 100644 index 
859d68e60ed3..000000000000 --- a/packages/bigframes/scripts/run_and_publish_benchmark.py +++ /dev/null @@ -1,481 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import datetime -import json -import os -import pathlib -import re -import subprocess -import sys -import tempfile -from typing import Dict, List, Tuple, Union - -import numpy as np -import pandas as pd -import pandas_gbq - -LOGGING_NAME_ENV_VAR = "BIGFRAMES_PERFORMANCE_LOG_NAME" -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - - -def run_benchmark_subprocess(args, log_env_name_var, file_path=None, region=None): - """ - Runs a benchmark subprocess with configured environment variables. Adjusts PYTHONPATH, - sets region-specific BigQuery location, and logs environment variables. - - This function terminates the benchmark session if the subprocess exits with an error, - due to `check=True` in subprocess.run, which raises CalledProcessError on non-zero - exit status. 
- """ - env = os.environ.copy() - current_pythonpath = env.get("PYTHONPATH", "") - env["PYTHONPATH"] = ( - os.path.join(os.getcwd(), "tests") + os.pathsep + current_pythonpath - ) - - if region: - env["BIGQUERY_LOCATION"] = region - env[LOGGING_NAME_ENV_VAR] = log_env_name_var - try: - if file_path: # Notebooks - duration_pattern = re.compile(r"(\d+\.\d+)s call") - process = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, text=True) - assert process.stdout is not None - for line in process.stdout: - print(line, end="") - match = duration_pattern.search(line) - if match: - duration = match.group(1) - with open(f"{file_path}.local_exec_time_seconds", "w") as f: - f.write(f"{duration}\n") - process.wait() - if process.returncode != 0: - raise subprocess.CalledProcessError(process.returncode, args) - else: # Benchmarks - file_path = log_env_name_var - subprocess.run(args, env=env, check=True) - except Exception: - directory = pathlib.Path(file_path).parent - for file in directory.glob(f"{pathlib.Path(file_path).name}.*"): - if file.suffix != ".backup": - print(f"Benchmark failed, deleting: {file}") - file.unlink() - error_file = directory / f"{pathlib.Path(file_path).name}.error" - error_file.touch() - - -def collect_benchmark_result( - benchmark_path: str, iterations: int -) -> Tuple[pd.DataFrame, Union[str, None]]: - """Generate a DataFrame report on HTTP queries, bytes processed, slot time and execution time from log files.""" - path = pathlib.Path(benchmark_path) - try: - results_dict: Dict[str, List[Union[int, float, None]]] = {} - # Use local_seconds_files as the baseline - local_seconds_files = sorted(path.rglob("*.local_exec_time_seconds")) - error_files = sorted(path.rglob("*.error")) - benchmarks_with_missing_files = [] - - for local_seconds_file in local_seconds_files: - base_name = local_seconds_file.name.removesuffix(".local_exec_time_seconds") - base_path = local_seconds_file.parent / base_name - filename = base_path.relative_to(path) - - # 
Construct paths for other metric files - bytes_file = pathlib.Path(f"{base_path}.bytesprocessed") - millis_file = pathlib.Path(f"{base_path}.slotmillis") - bq_seconds_file = pathlib.Path(f"{base_path}.bq_exec_time_seconds") - query_char_count_file = pathlib.Path(f"{base_path}.query_char_count") - - # Check if all corresponding files exist - missing_files = [] - if not bytes_file.exists(): - missing_files.append(bytes_file.name) - if not millis_file.exists(): - missing_files.append(millis_file.name) - if not bq_seconds_file.exists(): - missing_files.append(bq_seconds_file.name) - if not query_char_count_file.exists(): - missing_files.append(query_char_count_file.name) - - if missing_files: - benchmarks_with_missing_files.append((str(filename), missing_files)) - continue - - with open(query_char_count_file, "r") as file: - lines = file.read().splitlines() - query_char_count = sum(int(line) for line in lines) / iterations - query_count = len(lines) / iterations - - with open(local_seconds_file, "r") as file: - lines = file.read().splitlines() - local_seconds = sum(float(line) for line in lines) / iterations - - with open(bytes_file, "r") as file: - lines = file.read().splitlines() - total_bytes = sum(int(line) for line in lines) / iterations - - with open(millis_file, "r") as file: - lines = file.read().splitlines() - total_slot_millis = sum(int(line) for line in lines) / iterations - - with open(bq_seconds_file, "r") as file: - lines = file.read().splitlines() - bq_seconds = sum(float(line) for line in lines) / iterations - - results_dict[str(filename)] = [ - query_count, - total_bytes, - total_slot_millis, - local_seconds, - bq_seconds, - query_char_count, - ] - finally: - for files_to_remove in ( - path.rglob("*.bytesprocessed"), - path.rglob("*.slotmillis"), - path.rglob("*.local_exec_time_seconds"), - path.rglob("*.bq_exec_time_seconds"), - path.rglob("*.query_char_count"), - path.rglob("*.error"), - ): - for log_file in files_to_remove: - log_file.unlink() - - 
columns = [ - "Query_Count", - "Bytes_Processed", - "Slot_Millis", - "Local_Execution_Time_Sec", - "BigQuery_Execution_Time_Sec", - "Query_Char_Count", - ] - - benchmark_metrics = pd.DataFrame.from_dict( - results_dict, - orient="index", - columns=columns, - ) - - report_title = ( - "---BIGQUERY USAGE REPORT---" - if iterations == 1 - else f"---BIGQUERY USAGE REPORT (Averages over {iterations} Iterations)---" - ) - print(report_title) - for index, row in benchmark_metrics.iterrows(): - formatted_local_exec_time = ( - f"{round(row['Local_Execution_Time_Sec'], 1)} seconds" - if not pd.isna(row["Local_Execution_Time_Sec"]) - else "N/A" - ) - print( - f"{index} - query count: {row['Query_Count']}," - + f" query char count: {row['Query_Char_Count']}," - + f" bytes processed sum: {row['Bytes_Processed']}," - + f" slot millis sum: {row['Slot_Millis']}," - + f" local execution time: {formatted_local_exec_time}" - + f", bigquery execution time: {round(row['BigQuery_Execution_Time_Sec'], 1)} seconds" - ) - - geometric_mean_queries = geometric_mean_excluding_zeros( - benchmark_metrics["Query_Count"] - ) - geometric_mean_query_char_count = geometric_mean_excluding_zeros( - benchmark_metrics["Query_Char_Count"] - ) - geometric_mean_bytes = geometric_mean_excluding_zeros( - benchmark_metrics["Bytes_Processed"] - ) - geometric_mean_slot_millis = geometric_mean_excluding_zeros( - benchmark_metrics["Slot_Millis"] - ) - geometric_mean_local_seconds = geometric_mean_excluding_zeros( - benchmark_metrics["Local_Execution_Time_Sec"] - ) - geometric_mean_bq_seconds = geometric_mean_excluding_zeros( - benchmark_metrics["BigQuery_Execution_Time_Sec"] - ) - - print( - f"---Geometric mean of queries: {geometric_mean_queries}," - + f" Geometric mean of queries char counts: {geometric_mean_query_char_count}," - + f" Geometric mean of bytes processed: {geometric_mean_bytes}," - + f" Geometric mean of slot millis: {geometric_mean_slot_millis}," - + f" Geometric mean of local execution time: 
{geometric_mean_local_seconds} seconds" - + f", Geometric mean of BigQuery execution time: {geometric_mean_bq_seconds} seconds---" - ) - - all_errors: List[str] = [] - if error_files: - all_errors.extend( - f"Failed: {error_file.relative_to(path).with_suffix('')}" - for error_file in error_files - ) - if ( - benchmarks_with_missing_files - and os.getenv("BENCHMARK_AND_PUBLISH", "false") == "true" - ): - all_errors.extend( - f"Missing files for benchmark '{name}': {files}" - for name, files in benchmarks_with_missing_files - ) - error_message = "\n" + "\n".join(all_errors) if all_errors else None - return ( - benchmark_metrics.reset_index().rename(columns={"index": "Benchmark_Name"}), - error_message, - ) - - -def geometric_mean_excluding_zeros(data): - """ - Calculate the geometric mean of a dataset, excluding any zero values. - Returns NaN if the dataset is empty, contains only NaN values, or if - all non-NaN values are zeros. - - The result is rounded to one decimal place. - """ - data = data.dropna() - data = data[data != 0] - if len(data) == 0: - return np.nan - log_data = np.log(data) - return round(np.exp(log_data.mean()), 1) - - -def get_repository_status(): - current_directory = os.getcwd() - subprocess.run( - ["git", "config", "--global", "--add", "safe.directory", current_directory], - check=True, - ) - - git_hash = subprocess.check_output( - ["git", "rev-parse", "--short", "HEAD"], text=True - ).strip() - bigframes_version = subprocess.check_output( - ["python", "-c", "import bigframes; print(bigframes.__version__)"], text=True - ).strip() - release_version = ( - f"{bigframes_version}dev{datetime.datetime.now().strftime('%Y%m%d')}+{git_hash}" - ) - - return { - "benchmark_start_time": datetime.datetime.now().isoformat(), - "git_hash": git_hash, - "bigframes_version": bigframes_version, - "release_version": release_version, - "python_version": sys.version, - } - - -def find_config(start_path): - """ - Searches for a 'config.jsonl' file starting from the 
given path and moving up to parent - directories. - - This function ascends from the initial directory specified by `start_path` up to 3 - levels or until it reaches a directory named 'benchmark'. The search moves upwards - because if there are multiple 'config.jsonl' files in the path hierarchy, the closest - configuration to the starting directory (the lowest level) is expected to take effect. - It checks each directory for the presence of 'config.jsonl'. If found, it returns the - path to the configuration file. If not found within the limit or upon reaching - the 'benchmark' directory, it returns None. - """ - target_file = "config.jsonl" - current_path = pathlib.Path(start_path).resolve() - if current_path.is_file(): - current_path = current_path.parent - - levels_checked = 0 - while current_path.name != "benchmark" and levels_checked < 3: - config_path = current_path / target_file - if config_path.exists(): - return config_path - if current_path.parent == current_path: - break - current_path = current_path.parent - levels_checked += 1 - - return None - - -def publish_to_bigquery(dataframe, notebook, project_name="bigframes-metrics"): - bigquery_table = ( - f"{project_name}.benchmark_report.notebook_benchmark" - if notebook - else f"{project_name}.benchmark_report.benchmark" - ) - - repo_status = get_repository_status() - for idx, col in enumerate(repo_status.keys()): - dataframe.insert(idx, col, repo_status[col]) - - pandas_gbq.to_gbq( - dataframe=dataframe, - destination_table=bigquery_table, - if_exists="append", - ) - print(f"Results have been successfully uploaded to {bigquery_table}.") - - -def run_benchmark_from_config(benchmark: str, iterations: int): - print(benchmark) - config_path = find_config(benchmark) - - if config_path: - benchmark_configs = [] - with open(config_path, "r") as f: - for line in f: - if line.strip(): - config = json.loads(line) - python_args = [f"--{key}={value}" for key, value in config.items()] - suffix = ( - 
config["benchmark_suffix"] - if "benchmark_suffix" in config - else "_".join(f"{key}_{value}" for key, value in config.items()) - ) - benchmark_configs.append((suffix, python_args)) - else: - benchmark_configs = [(None, [])] - - for _ in range(iterations): - for benchmark_config in benchmark_configs: - args = ["python", str(benchmark)] - args.extend(benchmark_config[1]) - log_env_name_var = str(benchmark) - if benchmark_config[0] is not None: - log_env_name_var += f"_{benchmark_config[0]}" - run_benchmark_subprocess(args=args, log_env_name_var=log_env_name_var) - - -def run_notebook_benchmark(benchmark_file: str, region: str): - export_file = f"{benchmark_file}_{region}" if region else benchmark_file - log_env_name_var = os.path.basename(export_file) - # TODO(shobs): For some reason --retries arg masks exceptions occurred in - # notebook failures, and shows unhelpful INTERNALERROR. Investigate that - # and enable retries if we can find a way to surface the real exception - # bacause the notebook is running against real GCP and something may fail - # due to transient issues. - pytest_command = [ - "py.test", - "--nbmake", - "--nbmake-timeout=900", # 15 minutes - "--durations=0", - "--color=yes", - ] - benchmark_args = (*pytest_command, benchmark_file) - - run_benchmark_subprocess( - args=benchmark_args, - log_env_name_var=log_env_name_var, - file_path=export_file, - region=region, - ) - - -def parse_arguments(): - parser = argparse.ArgumentParser( - description="Run benchmarks for different scenarios." - ) - parser.add_argument( - "--notebook", - action="store_true", - help="Set this flag to run the benchmark as a notebook. 
If not set, it assumes a Python (.py) file.", - ) - - parser.add_argument( - "--benchmark-path", - type=str, - default=None, - help="Specify the file path to the benchmark script, either a Jupyter notebook or a Python script.", - ) - - parser.add_argument( - "--region", - type=str, - default=None, - help="Specify the region where the benchmark will be executed or where the data resides. This parameter is optional.", - ) - - parser.add_argument( - "--publish-benchmarks", - type=str, - default=None, - help="Set the benchmarks to be published to BigQuery.", - ) - - parser.add_argument( - "--iterations", - type=int, - default=1, - help="Number of iterations to run each benchmark.", - ) - parser.add_argument( - "--output-csv", - type=str, - default=None, - help="Determines whether to output results to a CSV file. If no location is provided, a temporary location is automatically generated.", - ) - - return parser.parse_args() - - -def main(): - args = parse_arguments() - - if args.publish_benchmarks: - benchmark_metrics, error_message = collect_benchmark_result( - args.publish_benchmarks, args.iterations - ) - # Output results to CSV without specifying a location - if args.output_csv == "True": - current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") - temp_file = tempfile.NamedTemporaryFile( - prefix=f"benchmark_{current_time}_", delete=False, suffix=".csv" - ) - benchmark_metrics.to_csv(temp_file.name, index=False) - print( - f"Benchmark result is saved to a temporary location: {temp_file.name}" - ) - temp_file.close() - # Output results to CSV with specified a custom location - elif args.output_csv != "False": - benchmark_metrics.to_csv(args.output_csv, index=False) - print(f"Benchmark result is saved to: {args.output_csv}") - - # Publish the benchmark metrics to BigQuery under the 'bigframes-metrics' project. - # The 'BENCHMARK_AND_PUBLISH' environment variable should be set to 'true' only - # in specific Kokoro sessions. 
- if os.getenv("BENCHMARK_AND_PUBLISH", "false") == "true": - publish_to_bigquery(benchmark_metrics, args.notebook) - # If the 'GCLOUD_BENCH_PUBLISH_PROJECT' environment variable is set, publish the - # benchmark metrics to a specified BigQuery table in the provided project. This is - # intended for local testing where the default behavior is not to publish results. - elif project := os.getenv("GCLOUD_BENCH_PUBLISH_PROJECT", ""): - publish_to_bigquery(benchmark_metrics, args.notebook, project) - - if error_message: - raise Exception(error_message) - elif args.notebook: - run_notebook_benchmark(args.benchmark_path, args.region) - else: - run_benchmark_from_config(args.benchmark_path, args.iterations) - - -if __name__ == "__main__": - main() diff --git a/packages/bigframes/scripts/setup-project-for-testing.sh b/packages/bigframes/scripts/setup-project-for-testing.sh deleted file mode 100755 index df9cea46a4be..000000000000 --- a/packages/bigframes/scripts/setup-project-for-testing.sh +++ /dev/null @@ -1,259 +0,0 @@ -#!/bin/bash - -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -if [ $# -lt 1 ]; then - echo "USAGE: `basename $0` []" - echo "EXAMPLES:" - echo " `basename $0` my-project" - echo " `basename $0` my-project user:user_id@example.com" - echo " `basename $0` my-project group:group_id@example.com" - echo " `basename $0` my-project serviceAccount:service_account_id@example.com" - exit 1 -fi - -PROJECT_ID=$1 -PRINCIPAL=$2 -BIGFRAMES_DEFAULT_CONNECTION_NAME=bigframes-default-connection -BIGFRAMES_RF_CONNECTION_NAME=bigframes-rf-conn - -if [ "$PRINCIPAL" != "" ]; then - echo $PRINCIPAL | grep -E "(user|group|serviceAccount):" >/dev/null - if [ $? -ne 0 ]; then - echo "principal must have prefix 'user:', 'group:' or 'serviceAccount:'" - exit 1 - fi -fi - -if ! test `which gcloud`; then - echo "gcloud CLI is not installed. Install it from https://cloud.google.com/sdk/docs/install." >&2 - exit 1 -fi - -################################################################################ -# Log and execute a command -################################################################################ -function log_and_execute() { - echo Running command: $* - $* -} - - -################################################################################ -# Enable APIs -################################################################################ -function enable_apis() { - for service in aiplatform.googleapis.com \ - artifactregistry.googleapis.com \ - bigquery.googleapis.com \ - bigqueryconnection.googleapis.com \ - bigquerystorage.googleapis.com \ - cloudbuild.googleapis.com \ - cloudfunctions.googleapis.com \ - cloudresourcemanager.googleapis.com \ - compute.googleapis.com \ - run.googleapis.com \ - ; do - log_and_execute gcloud --project=$PROJECT_ID services enable $service - if [ $? -ne 0 ]; then - echo "Failed to enable service $service, exiting..." 
- exit 1 - fi - done -} - - -################################################################################ -# Ensure a BQ connection exists with desired IAM rols -################################################################################ -function ensure_bq_connection_with_iam() { - if [ $# -ne 2 ]; then - echo "USAGE: `basename $0` " - echo "EXAMPLES:" - echo " `basename $0` my-project my-connection" - exit 1 - fi - - location=$1 - connection_name=$2 - - log_and_execute bq show \ - --connection \ - --project_id=$PROJECT_ID \ - --location=$location \ - $connection_name 2>&1 >/dev/null - if [ $? -ne 0 ]; then - echo "Connection $connection_name doesn't exists in location \"$location\", creating..." - log_and_execute bq mk \ - --connection \ - --project_id=$PROJECT_ID \ - --location=$location \ - --connection_type=CLOUD_RESOURCE \ - $connection_name - if [ $? -ne 0 ]; then - echo "Failed creating connection, exiting." - exit 1 - fi - else - echo "Connection $connection_name already exists in location $location." - fi - - compact_json_info_cmd="bq show --connection \ - --project_id=$PROJECT_ID \ - --location=$location \ - --format=json \ - $connection_name" - compact_json_info_cmd_output=`$compact_json_info_cmd` - if [ $? -ne 0 ]; then - echo "Failed to fetch connection info: $compact_json_info_cmd_output" - exit 1 - fi - - connection_service_account=`echo $compact_json_info_cmd_output | sed -e 's/.*"cloudResource":{"serviceAccountId":"//' -e 's/".*//'` - - # Configure roles for the service accounts associated with the connection - for role in run.invoker aiplatform.user; do - log_and_execute gcloud projects add-iam-policy-binding $PROJECT_ID \ - --member=serviceAccount:$connection_service_account \ - --role=roles/$role - if [ $? -ne 0 ]; then - echo "Failed to set IAM, exiting..." 
- exit 1 - fi - done -} - - -################################################################################ -# Create the default BQ connection in US location -################################################################################ -function ensure_bq_connections_with_iam() { - ensure_bq_connection_with_iam "us" "$BIGFRAMES_DEFAULT_CONNECTION_NAME" - - # Create commonly used BQ connection in various locations - for location in asia-southeast1 \ - eu \ - europe-west4 \ - southamerica-west1 \ - us \ - us-central1 \ - us-east5 \ - ; do - ensure_bq_connection_with_iam "$location" "$BIGFRAMES_RF_CONNECTION_NAME" - done -} - - -################################################################################ -# Set up IAM roles for principal -################################################################################ -function setup_iam_roles () { - if [ "$PRINCIPAL" != "" ]; then - for role in aiplatform.user \ - bigquery.user \ - bigquery.connectionAdmin \ - bigquery.dataEditor \ - browser \ - cloudfunctions.developer \ - iam.serviceAccountUser \ - ; do - log_and_execute gcloud projects add-iam-policy-binding $PROJECT_ID \ - --member=$PRINCIPAL \ - --role=roles/$role - if [ $? -ne 0 ]; then - echo "Failed to set IAM, exiting..." - exit 1 - fi - done - fi -} - - -################################################################################ -# Create vertex endpoint for test ML model -################################################################################ -function create_bq_model_vertex_endpoint () { - vertex_region=us-central1 - model_name=bigframes-test-linreg2 - endpoint_name=$model_name-endpoint - - # Create vertex model - log_and_execute python scripts/create_test_model_vertex.py \ - -m $model_name \ - -p $PROJECT_ID - if [ $? -ne 0 ]; then - echo "Failed to create model, exiting..." 
- exit 1 - fi - - # Create vertex endpoint - log_and_execute gcloud ai endpoints create \ - --project=$PROJECT_ID \ - --region=$vertex_region \ - --display-name=$endpoint_name - if [ $? -ne 0 ]; then - echo "Failed to create vertex endpoint, exiting..." - exit 1 - fi - - # Fetch endpoint id - endpoint_id=`gcloud ai endpoints list \ - --project=$PROJECT_ID \ - --region=$vertex_region \ - --filter=display_name=$endpoint_name 2>/dev/null \ - | tail -n1 | cut -d' ' -f 1` - if [ "$endpoint_id" = "" ]; then - echo "Failed to fetch vertex endpoint id, exiting..." - exit 1 - fi - - # Deploy the model to the vertex endpoint - log_and_execute gcloud ai endpoints deploy-model $endpoint_id \ - --project=$PROJECT_ID \ - --region=$vertex_region \ - --model=$model_name \ - --display-name=$model_name - if [ $? -ne 0 ]; then - echo "Failed to deploy model to vertex endpoint, exiting..." - exit 1 - fi - - # Form the endpoint - endpoint_rel_path=`gcloud ai endpoints describe \ - --project=$PROJECT_ID \ - --region=us-central1 \ - $endpoint_id 2>/dev/null \ - | grep "^name:" | cut -d' ' -f2` - if [ "$endpoint_rel_path" = "" ]; then - echo "Failed to fetch vertex endpoint relativr path, exiting..." 
- exit 1 - fi - endpoint_path=https://$vertex_region-aiplatform.googleapis.com/v1/$endpoint_rel_path - - # Print the endpoint configuration to be used in tests - echo - echo Run following command to set test model vertex endpoint: - echo export BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT=$endpoint_path -} - - -################################################################################ -# Set the things up -################################################################################ -enable_apis -ensure_bq_connections_with_iam -setup_iam_roles -create_bq_model_vertex_endpoint diff --git a/packages/bigframes/scripts/test_publish_api_coverage.py b/packages/bigframes/scripts/test_publish_api_coverage.py deleted file mode 100644 index 167cf5917b0b..000000000000 --- a/packages/bigframes/scripts/test_publish_api_coverage.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys - -import pandas -import pytest -from publish_api_coverage import build_api_coverage_table - -pytest.importorskip("sklearn") - - -@pytest.fixture -def api_coverage_df(): - return build_api_coverage_table("my_bf_ver", "my_release_ver") - - -@pytest.mark.skipif( - sys.version_info >= (3, 13), - reason="Issues with installing sklearn for this test in python 3.13", -) -def test_api_coverage_produces_expected_schema(api_coverage_df): - # Older pandas has different timestamp default precision - pytest.importorskip("pandas", minversion="2.0.0") - - pandas.testing.assert_series_equal( - api_coverage_df.dtypes, - pandas.Series( - data={ - # Note to developer: if you update this test, you will also - # need to update schema of the API coverage BigQuery table in - # the bigframes-metrics project. - "api": "string", - "pattern": "string", - "kind": "string", - "is_in_bigframes": "boolean", - "missing_parameters": "string", - "requires_index": "string", - "requires_ordering": "string", - "module": "string", - "timestamp": "datetime64[us]", - "bigframes_version": "string", - "release_version": "string", - }, - ), - # String dtype behavior not consistent across pandas versions - check_dtype=False, - ) - - -@pytest.mark.skipif( - sys.version_info >= (3, 13), - reason="Issues with installing sklearn for this test in python 3.13", -) -def test_api_coverage_produces_missing_parameters(api_coverage_df): - """Make sure at least some functions have reported missing parameters.""" - assert (api_coverage_df["missing_parameters"].str.len() > 0).any() diff --git a/packages/bigframes/scripts/upload_to_google_drive.py b/packages/bigframes/scripts/upload_to_google_drive.py deleted file mode 100644 index dcdc9168ba2c..000000000000 --- a/packages/bigframes/scripts/upload_to_google_drive.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Upload latest wheel to Google Drive. - -Based on -https://github.com/googleapis/google-resumable-media-python/blob/main/google/resumable_media/requests/__init__.py - -Before running, execute the following to make sure you can use the Google Drive API: - -gcloud auth application-default login --scopes=openid,https://www.googleapis.com/auth/userinfo.email,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/sqlservice.login,https://www.googleapis.com/auth/drive -""" - -import pathlib - -import google.auth -import google.auth.transport.requests -import google.resumable_media._upload -import google.resumable_media.requests as resumable_requests - -repo_root = pathlib.Path(__file__).parent.parent - -# Use PATCH instead of POST to replace existing files. 
-google.resumable_media._upload._POST = "PATCH" - -credentials, _ = google.auth.default(scopes=["https://www.googleapis.com/auth/drive"]) -transport = google.auth.transport.requests.AuthorizedSession(credentials) - -wheel_id = "15fZ1DkrFDk4ibMNTzms4akpxmf2pzeAR" -wheel_path = next(iter((repo_root / "dist").glob("bigframes-*.whl"))) - -uploads = ( - (wheel_id, wheel_path, "application/octet-stream"), - # (pdf_id, pdf_path, "application/pdf"), -) - -upload_template = ( - "https://www.googleapis.com/upload/drive/v3/files/{file_id}?uploadType=resumable" -) -chunk_size = 1024 * 1024 # 1MB - -for file_id, file_path, content_type in uploads: - print(f"Uploading {file_path}") - transport = google.auth.transport.requests.AuthorizedSession(credentials) - upload = resumable_requests.ResumableUpload( - upload_template.format(file_id=file_id), chunk_size - ) - - with open(file_path, "rb") as stream: - response = upload.initiate( - transport, stream, metadata={}, content_type=content_type - ) - print(response) - while not upload.finished: - response = upload.transmit_next_chunk(transport) - print(response) diff --git a/packages/bigframes/scripts/windows/build.bat b/packages/bigframes/scripts/windows/build.bat deleted file mode 100644 index d599702c98e9..000000000000 --- a/packages/bigframes/scripts/windows/build.bat +++ /dev/null @@ -1,38 +0,0 @@ -@rem Copyright 2024 Google LLC -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem http://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. 
- -:; Change directory to repo root. -SET script_dir="%~dp0" -cd "%~dp0"\..\.. - -echo "Listing available Python versions' -py -0 || goto :error - -py -3.10 -m pip install --upgrade pip || goto :error -py -3.10 -m pip install --upgrade pip setuptools wheel || goto :error - -echo "Building Wheel" -py -3.10 -m pip wheel . --wheel-dir wheels || goto :error/ - -echo "Built wheel, now running tests." -call "%script_dir%"/test.bat 3.10 || goto :error - -echo "Windows build has completed successfully" - -:; https://stackoverflow.com/a/46813196/101923 -:; exit 0 -exit /b 0 - -:error -exit /b %errorlevel% diff --git a/packages/bigframes/scripts/windows/test.bat b/packages/bigframes/scripts/windows/test.bat deleted file mode 100644 index bcd605bd129f..000000000000 --- a/packages/bigframes/scripts/windows/test.bat +++ /dev/null @@ -1,40 +0,0 @@ -@rem Copyright 2024 Google LLC -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem http://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. - -@rem This test file runs for one Python version at a time, and is intended to -@rem be called from within the build loop. - -:; Change directory to repo root. -SET script_dir="%~dp0" -cd "%~dp0"\..\.. 
- -set PYTHON_VERSION=%1 -if "%PYTHON_VERSION%"=="" ( - echo "Python version was not provided, using Python 3.10" - set PYTHON_VERSION=3.10 -) - -py -%PYTHON_VERSION%-64 -m pip install nox || goto :error - -py -%PYTHON_VERSION%-64 -m nox -s unit-"%PYTHON_VERSION%" || goto :error - -:; TODO(b/358148440): enable system tests on windows -:; py -%PYTHON_VERSION%-64 -m nox -s system-"%PYTHON_VERSION%" || goto :error - -:; https://stackoverflow.com/a/46813196/101923 -:; exit 0 -exit /b 0 - -:error -exit /b %errorlevel% diff --git a/packages/bigquery-magics/.repo-metadata.json b/packages/bigquery-magics/.repo-metadata.json index 8343b50d5c25..b30b6f137902 100644 --- a/packages/bigquery-magics/.repo-metadata.json +++ b/packages/bigquery-magics/.repo-metadata.json @@ -1,16 +1,13 @@ { - "name": "bigquery-magics", - "name_pretty": "Google BigQuery connector for Jupyter and IPython", - "product_documentation": "https://cloud.google.com/bigquery", + "api_id": "bigquery.googleapis.com", "client_documentation": "https://googleapis.dev/python/bigquery-magics/latest/", + "distribution_name": "bigquery-magics", "issue_tracker": "https://github.com/googleapis/python-bigquery-magics/issues", - "release_level": "stable", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "bigquery-magics", - "api_id": "bigquery.googleapis.com", - "requires_billing": false, - "default_version": "", - "codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team" -} + "name": "bigquery-magics", + "name_pretty": "Google BigQuery connector for Jupyter and IPython", + "product_documentation": "https://cloud.google.com/bigquery", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/bigquery-magics/docs/README.rst b/packages/bigquery-magics/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- 
a/packages/bigquery-magics/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/bigquery-magics/docs/README.rst b/packages/bigquery-magics/docs/README.rst new file mode 100644 index 000000000000..dd8f041ffa16 --- /dev/null +++ b/packages/bigquery-magics/docs/README.rst @@ -0,0 +1,103 @@ +IPython Magics for BigQuery +=========================== + +|GA| |pypi| |versions| + +Querying massive datasets can be time consuming and expensive without the +right hardware and infrastructure. Google `BigQuery`_ solves this problem by +enabling super-fast, SQL queries against append-mostly tables, using the +processing power of Google's infrastructure. + +- `Library Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/bigquery-magics.svg + :target: https://pypi.org/project/bigquery-magics/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/bigquery-magics.svg + :target: https://pypi.org/project/bigquery-magics/ +.. _BigQuery: https://cloud.google.com/bigquery/what-is-bigquery +.. _Library Documentation: https://googleapis.dev/python/bigquery-magics/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/v2/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud BigQuery API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud BigQuery API.: https://cloud.google.com/bigquery +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.9 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.8. + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install bigquery-magics + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install bigquery-magics + +Example Usage +------------- + +To use these magics, you must first register them. Run the ``%load_ext bigquery_magics`` +in a Jupyter notebook cell. + +.. code-block:: + + %load_ext bigquery_magics + +Perform a query +~~~~~~~~~~~~~~~ + +.. 
code:: python + + %%bigquery + SELECT name, SUM(number) as count + FROM 'bigquery-public-data.usa_names.usa_1910_current' + GROUP BY name + ORDER BY count DESC + LIMIT 3 diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json index b7c4c9729484..6f8ee606655a 100644 --- a/packages/db-dtypes/.repo-metadata.json +++ b/packages/db-dtypes/.repo-metadata.json @@ -1,15 +1,12 @@ { + "api_id": "bigquery.googleapis.com", + "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", + "distribution_name": "db-dtypes", + "language": "python", + "library_type": "INTEGRATION", "name": "db-dtypes", "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", "product_documentation": "https://pandas.pydata.org/pandas-docs/stable/ecosystem.html#ecosystem-extensions", - "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", "release_level": "stable", - "language": "python", - "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "db-dtypes", - "api_id": "bigquery.googleapis.com", - "default_version": "", - "codeowner_team": "@googleapis/python-core-client-libraries", - "api_description": "Pandas extension data types for data from SQL systems such as BigQuery." 
-} + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/db-dtypes/docs/README.rst b/packages/db-dtypes/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/db-dtypes/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/db-dtypes/docs/README.rst b/packages/db-dtypes/docs/README.rst new file mode 100644 index 000000000000..eab2705e2487 --- /dev/null +++ b/packages/db-dtypes/docs/README.rst @@ -0,0 +1,63 @@ +Pandas Data Types for SQL systems (BigQuery, Spanner) +===================================================== + +|ga| |pypi| |versions| + +`Pandas extension data types`_ for data from SQL systems such as `BigQuery`_. + +- `Library Documentation`_ + +.. |ga| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/db-dtypes.svg + :target: https://pypi.org/project/db-dtypes/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/db-dtypes.svg + :target: https://pypi.org/project/db-dtypes/ +.. _Pandas extension data types: https://pandas.pydata.org/docs/development/extending.html#extension-types +.. _BigQuery: https://cloud.google.com/bigquery/docs/ +.. _Library Documentation: https://googleapis.dev/python/db-dtypes/latest + + +Installation +------------ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.9 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.8. + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install db-dtypes + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install db-dtypes diff --git a/packages/django-google-spanner/.repo-metadata.json b/packages/django-google-spanner/.repo-metadata.json index 22866389b5fa..9a8193e69015 100644 --- a/packages/django-google-spanner/.repo-metadata.json +++ b/packages/django-google-spanner/.repo-metadata.json @@ -1,14 +1,13 @@ { - "name": "django-google-spanner", - "name_pretty": "Cloud Spanner Django", - "product_documentation": "https://cloud.google.com/spanner/docs/", + "api_shortname": "django-google-spanner", "client_documentation": "https://googleapis.dev/python/django-google-spanner/latest", + "distribution_name": "django-google-spanner", "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open", - "release_level": "stable", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "django-google-spanner", - "requires_billing": true, - "api_shortname": "django-google-spanner" -} + "name": "django-google-spanner", + "name_pretty": "Cloud Spanner Django", + "product_documentation": "https://cloud.google.com/spanner/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/django-google-spanner/docs/README.rst b/packages/django-google-spanner/docs/README.rst index 72a33558153f..902696c81756 100644 --- a/packages/django-google-spanner/docs/README.rst +++ b/packages/django-google-spanner/docs/README.rst @@ -1 +1,288 @@ -.. 
include:: ../README.rst +Cloud Spanner support for Django +================================ + +|GA| |pypi| |versions| + +`Cloud Spanner`_ is the world's first fully managed relational database service +to offer both strong consistency and horizontal scalability for +mission-critical online transaction processing (OLTP) applications. With Cloud +Spanner you enjoy all the traditional benefits of a relational database; but +unlike any other relational database service, Cloud Spanner scales horizontally +to hundreds or thousands of servers to handle the biggest transactional +workloads. + + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/django-google-spanner.svg + :target: https://pypi.org/project/django-google-spanner/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/django-google-spanner.svg + :target: https://pypi.org/project/django-google-spanner/ +.. _Cloud Spanner: https://cloud.google.com/spanner/ +.. _Client Library Documentation: https://googleapis.dev/python/django-google-spanner/latest/index.html +.. _Product Documentation: https://cloud.google.com/spanner/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Spanner API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Spanner API.: https://cloud.google.com/spanner +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +This package provides a `3rd-party database backend +`__ +for using `Cloud Spanner `__ with the `Django +ORM `__. It uses the `Cloud +Spanner Python client library `__ +under the hood. + +Installation +------------ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python and Django environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported versions +~~~~~~~~~~~~~~~~~~ + +The library supports `Django 3.2 +`_, and `Django 4.2 +`_. +Both versions are long-term support (LTS) releases for the +`Django project_`. +The minimum required Python version is 3.6. + +.. code:: shell + + pip3 install django==3.2 + + +Installing the package +~~~~~~~~~~~~~~~~~~~~~~ + +To install from PyPI: + +.. code:: shell + + pip3 install django-google-spanner + + +To install from source: + +.. code:: shell + + git clone git@github.com:googleapis/google-cloud-python.git + cd python-spanner-django + pip3 install -e . + + +Creating a Cloud Spanner instance and database +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you don't already have a Cloud Spanner database, or want to start from +scratch for a new Django application, you can `create a new instance +`__ +and `database +`__ +using the Google Cloud SDK: + +.. code:: shell + + gcloud spanner instances create $INSTANCE --config=regional-us-central1 --description="New Django Instance" --nodes=1 + gcloud spanner databases create $DB --instance $INSTANCE + + +Configuring ``settings.py`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This package provides a Django application named ``django_spanner``. 
To use the +Cloud Spanner database backend, the application needs to installed and +configured: + +- Add ``django_spanner`` as the first entry in ``INSTALLED_APPS``: + + .. code:: python + + INSTALLED_APPS = [ + 'django_spanner', + ... + ] + +- Edit the ``DATABASES`` setting to point to an existing Cloud Spanner database: + + .. code:: python + + DATABASES = { + 'default': { + 'ENGINE': 'django_spanner', + 'PROJECT': '$PROJECT', + 'INSTANCE': '$INSTANCE', + 'NAME': '$DATABASE', + } + } + +Configuring primary key generation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Spanner Django engine by default uses random int64 values that are generated +by the client as primary key values. This default is applied to all databases that are +configured, including databases that use a different engine than Spanner. You can +disable this behavior with the RANDOM_ID_GENERATION_ENABLED setting: + + .. code:: python + + DATABASES = { + 'default': { + 'ENGINE': 'django_spanner', + 'PROJECT': '$PROJECT', + 'INSTANCE': '$INSTANCE', + 'NAME': '$DATABASE', + 'RANDOM_ID_GENERATION_ENABLED': false, + } + } + + + +Transaction support in autocommit mode +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Django version 4.2 and higher by default supports transactions in autocommit mode. +A transaction is automatically started if you define an +[atomic block](https://docs.djangoproject.com/en/4.2/topics/db/transactions/#controlling-transactions-explicitly). + +Django version 3.2 and earlier did not support transactions in autocommit mode with Spanner. +You can enable transactions in autocommit mode with Spanner with the +`ALLOW_TRANSACTIONS_IN_AUTO_COMMIT` configuration option. + +- To enable transactions in autocommit mode in V3.2, set the flag `ALLOW_TRANSACTIONS_IN_AUTO_COMMIT` to True in your settings.py file. +- To disable transactions in autocommit mode in V4.2, set the flag `ALLOW_TRANSACTIONS_IN_AUTO_COMMIT` to False in your settings.py file. 
+ + +Set credentials and project environment variables +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +You'll need to download a service account JSON key file and point to it using an environment variable: + +.. code:: shell + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/keyfile.json + export GOOGLE_CLOUD_PROJECT=gcloud_project + + +Apply the migrations +~~~~~~~~~~~~~~~~~~~~ + +Please run: + +.. code:: shell + + $ python3 manage.py migrate + +That'll take a while to run. After this you should be able to see the tables and indexes created in your Cloud Spanner console. + + +Create a Django admin user +~~~~~~~~~~~~~~~~~~~~~~~~~~~ +First you’ll need to create a user who can login to the admin site. Run the following command: + +.. code:: shell + + $ python3 manage.py createsuperuser + +which will then produce a prompt which will allow you to create your super user + +.. code:: shell + + Username: admin + Email address: admin@example.com + Password: ********** + Password (again): ********** + Superuser created successfully. + + +Login as admin +~~~~~~~~~~~~~~ +Now, run the server + +.. code:: shell + + python3 manage.py runserver + +Then visit http://127.0.0.1:8000/admin/ + +Create and register your first model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Please follow the guides in https://docs.djangoproject.com/en/4.2/intro/tutorial02/#creating-models +to create and register the model to the Django’s automatically-generated admin site. + +How it works +------------ + +Overall design +~~~~~~~~~~~~~~ + +.. figure:: https://raw.githubusercontent.com/googleapis/google-cloud-python/main/assets/overview.png + :alt: "Overall Design" + +Internals +~~~~~~~~~ + +.. figure:: https://raw.githubusercontent.com/googleapis/google-cloud-python/main/assets/internals.png + :alt: "Internals" + + +Executing a query +~~~~~~~~~~~~~~~~~ + +Here is an example of how to add a row for Model Author, save it and later query it using Django + +.. 
code:: shell + + >>> author_kent = Author( first_name="Arthur", last_name="Kent", rating=Decimal("4.1"),) + >>> author_kent.save() + >>> qs1 = Author.objects.all().values("first_name", "last_name") + + +How to contribute +~~~~~~~~~~~~~~~~~ + +Contributions to this library are always welcome and highly encouraged. + +See `CONTRIBUTING `_ for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. +By participating in this project you agree to abide by its terms. See the `Code +of Conduct `_ for more information. + + +Limitations +~~~~~~~~~~~ + +Spanner has certain limitations of its own. The full set of limitations is documented +`here `__. +It is recommended that you go through that list. + +Django spanner has a set of limitations as well, which you can find +`here `__. + +Features from spanner that are not supported in Django-spanner are listed +`here `__. diff --git a/packages/gapic-generator/.repo-metadata.json b/packages/gapic-generator/.repo-metadata.json index f13375b124da..0bbf8431ca82 100644 --- a/packages/gapic-generator/.repo-metadata.json +++ b/packages/gapic-generator/.repo-metadata.json @@ -1,14 +1,11 @@ { - "name": "gapic-generator", - "name_pretty": "Google API Client Generator for Python", "client_documentation": "https://gapic-generator-python.readthedocs.io/en/stable/", - "product_documentation": "", + "distribution_name": "gapic-generator", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "stable", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-cloud-python", - "distribution_name": "gapic-generator", - "default_version": "", - "codeowner_team": "@googleapis/cloud-sdk-python-team" -} + "name": "gapic-generator", + "name_pretty": "Google API Client Generator for Python", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git 
a/packages/gcp-sphinx-docfx-yaml/.repo-metadata.json b/packages/gcp-sphinx-docfx-yaml/.repo-metadata.json index a6a695c756f5..e98b7e7da38d 100644 --- a/packages/gcp-sphinx-docfx-yaml/.repo-metadata.json +++ b/packages/gcp-sphinx-docfx-yaml/.repo-metadata.json @@ -1,13 +1,12 @@ { - "name": "gcp-sphinx-docfx-yaml", - "name_pretty": "Sphinx DocFX YAML Generator", - "product_documentation": "https://github.com/googleapis/sphinx-docfx-yaml", - "client_documentation": "https://github.com/googleapis/sphinx-docfx-yaml", - "issue_tracker": "https://github.com/googleapis/sphinx-docfx-yaml/issues", - "release_level": "preview", - "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "gcp-sphinx-docfx-yaml", - "codeowner_team": "@googleapis/dkp-team", - "library_type": "OTHER" -} + "client_documentation": "https://github.com/googleapis/sphinx-docfx-yaml", + "distribution_name": "gcp-sphinx-docfx-yaml", + "issue_tracker": "https://github.com/googleapis/sphinx-docfx-yaml/issues", + "language": "python", + "library_type": "OTHER", + "name": "gcp-sphinx-docfx-yaml", + "name_pretty": "Sphinx DocFX YAML Generator", + "product_documentation": "https://github.com/googleapis/sphinx-docfx-yaml", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-ads-admanager/.repo-metadata.json b/packages/google-ads-admanager/.repo-metadata.json index 6431729f6332..9348224ec34d 100644 --- a/packages/google-ads-admanager/.repo-metadata.json +++ b/packages/google-ads-admanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Manage your Ad Manager inventory, run reports and more.", - "api_id": "admanager.googleapis.com", - "api_shortname": "admanager", - "client_documentation": "https://googleapis.dev/python/google-ads-admanager/latest", - "default_version": "v1", - "distribution_name": "google-ads-admanager", - "issue_tracker": 
"https://issuetracker.google.com/issues/new?component=1265187&template=1787490", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-ads-admanager", - "name_pretty": "Google Ad Manager", - "product_documentation": "https://developers.google.com/ad-manager/api/beta", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Manage your Ad Manager inventory, run reports and more.", + "api_id": "admanager.googleapis.com", + "api_shortname": "admanager", + "client_documentation": "https://googleapis.dev/python/google-ads-admanager/latest", + "default_version": "v1", + "distribution_name": "google-ads-admanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1265187\u0026template=1787490", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-ads-admanager", + "name_pretty": "Google Ad Manager", + "product_documentation": "https://developers.google.com/ad-manager/api/beta", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-ads-datamanager/.repo-metadata.json b/packages/google-ads-datamanager/.repo-metadata.json index 6374062415f3..0cf1dae7bb98 100644 --- a/packages/google-ads-datamanager/.repo-metadata.json +++ b/packages/google-ads-datamanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "A unified ingestion API for data partners, agencies and advertisers to\nconnect first-party data across Google advertising products.", - "api_id": "datamanager.googleapis.com", - "api_shortname": "datamanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-ads-datamanager/latest", - "default_version": "v1", - "distribution_name": "google-ads-datamanager", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1812065", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-ads-datamanager", - "name_pretty": "Data 
Manager API", - "product_documentation": "https://developers.google.com/data-manager", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "A unified ingestion API for data partners, agencies and advertisers to\nconnect first-party data across Google advertising products.", + "api_id": "datamanager.googleapis.com", + "api_shortname": "datamanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-ads-datamanager/latest", + "default_version": "v1", + "distribution_name": "google-ads-datamanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1812065", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-ads-datamanager", + "name_pretty": "Data Manager API", + "product_documentation": "https://developers.google.com/data-manager", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/.repo-metadata.json b/packages/google-ads-marketingplatform-admin/.repo-metadata.json index 07c5179fc42d..7c6135f3dade 100644 --- a/packages/google-ads-marketingplatform-admin/.repo-metadata.json +++ b/packages/google-ads-marketingplatform-admin/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", - "api_id": "marketingplatformadmin.googleapis.com", - "api_shortname": "marketingplatformadmin", - "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", - "default_version": "v1alpha", - "distribution_name": "google-ads-marketingplatform-admin", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-ads-marketingplatform-admin", - "name_pretty": "Google Marketing Platform Admin API", - "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", + "api_id": "marketingplatformadmin.googleapis.com", + "api_shortname": "marketingplatformadmin", + "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", + "default_version": "v1alpha", + "distribution_name": "google-ads-marketingplatform-admin", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-ads-marketingplatform-admin", + "name_pretty": "Google Marketing Platform Admin API", + "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-ai-generativelanguage/.repo-metadata.json b/packages/google-ai-generativelanguage/.repo-metadata.json index 04c681557b27..e97dc77e7c4b 100644 --- a/packages/google-ai-generativelanguage/.repo-metadata.json +++ b/packages/google-ai-generativelanguage/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Gemini API allows developers to build generative AI applications using Gemini models. Gemini is our most capable model, built from the ground up to be multimodal. It can generalize and seamlessly understand, operate across, and combine different types of information including language, images, audio, video, and code. 
You can use the Gemini API for use cases like reasoning across text and images, content generation, dialogue agents, summarization and classification systems, and more.", - "api_id": "generativelanguage.googleapis.com", - "api_shortname": "generativelanguage", - "client_documentation": "https://googleapis.dev/python/generativelanguage/latest", - "default_version": "v1beta", - "distribution_name": "google-ai-generativelanguage", - "issue_tracker": "https://github.com/google/generative-ai-python/issues/new", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "generativelanguage", - "name_pretty": "Generative Language API", - "product_documentation": "https://ai.google.dev/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Gemini API allows developers to build generative AI applications using Gemini models. Gemini is our most capable model, built from the ground up to be multimodal. It can generalize and seamlessly understand, operate across, and combine different types of information including language, images, audio, video, and code. 
You can use the Gemini API for use cases like reasoning across text and images, content generation, dialogue agents, summarization and classification systems, and more.", + "api_id": "generativelanguage.googleapis.com", + "api_shortname": "generativelanguage", + "client_documentation": "https://googleapis.dev/python/generativelanguage/latest", + "default_version": "v1beta", + "distribution_name": "google-ai-generativelanguage", + "issue_tracker": "https://github.com/google/generative-ai-python/issues/new", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "generativelanguage", + "name_pretty": "Generative Language API", + "product_documentation": "https://ai.google.dev/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-analytics-admin/.repo-metadata.json b/packages/google-analytics-admin/.repo-metadata.json index b7efbd7279f3..20c2e21a36d7 100644 --- a/packages/google-analytics-admin/.repo-metadata.json +++ b/packages/google-analytics-admin/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "allows you to manage Google Analytics accounts and properties.", - "api_id": "analyticsadmin.googleapis.com", - "api_shortname": "analyticsadmin", - "client_documentation": "https://googleapis.dev/python/analyticsadmin/latest", - "default_version": "v1alpha", - "distribution_name": "google-analytics-admin", - "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:187400", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "analyticsadmin", - "name_pretty": "Analytics Admin", - "product_documentation": "https://developers.google.com/analytics/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "allows you to manage Google Analytics accounts and properties.", + "api_id": "analyticsadmin.googleapis.com", + "api_shortname": "analyticsadmin", + "client_documentation": 
"https://googleapis.dev/python/analyticsadmin/latest", + "default_version": "v1alpha", + "distribution_name": "google-analytics-admin", + "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:187400", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "analyticsadmin", + "name_pretty": "Analytics Admin", + "product_documentation": "https://developers.google.com/analytics/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-analytics-data/.repo-metadata.json b/packages/google-analytics-data/.repo-metadata.json index 7e614a48bfd2..c2602030d645 100644 --- a/packages/google-analytics-data/.repo-metadata.json +++ b/packages/google-analytics-data/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "provides programmatic methods to access report data in Google Analytics App+Web properties.", - "api_id": "analyticsdata.googleapis.com", - "api_shortname": "analyticsdata", - "client_documentation": "https://googleapis.dev/python/analyticsdata/latest", - "default_version": "v1beta", - "distribution_name": "google-analytics-data", - "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:187400", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "analyticsdata", - "name_pretty": "Analytics Data", - "product_documentation": "https://developers.google.com/analytics/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "provides programmatic methods to access report data in Google Analytics App+Web properties.", + "api_id": "analyticsdata.googleapis.com", + "api_shortname": "analyticsdata", + "client_documentation": "https://googleapis.dev/python/analyticsdata/latest", + "default_version": "v1beta", + "distribution_name": "google-analytics-data", + "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:187400", + "language": "python", + 
"library_type": "GAPIC_AUTO", + "name": "analyticsdata", + "name_pretty": "Analytics Data", + "product_documentation": "https://developers.google.com/analytics/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-api-core/.repo-metadata.json b/packages/google-api-core/.repo-metadata.json index 3d818f156586..0bf54747a80f 100644 --- a/packages/google-api-core/.repo-metadata.json +++ b/packages/google-api-core/.repo-metadata.json @@ -1,12 +1,10 @@ { - "name": "google-api-core", - "name_pretty": "Google API client core library", "client_documentation": "https://googleapis.dev/python/google-api-core/latest", - "release_level": "stable", + "distribution_name": "google-api-core", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-api-core", - "default_version": "", - "codeowner_team": "@googleapis/cloud-sdk-python-team" -} + "name": "google-api-core", + "name_pretty": "Google API client core library", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-apps-card/.repo-metadata.json b/packages/google-apps-card/.repo-metadata.json index 18a3cf658056..e183b48c4acd 100644 --- a/packages/google-apps-card/.repo-metadata.json +++ b/packages/google-apps-card/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Google Apps Card Protos", - "api_id": "card.googleapis.com", - "api_shortname": "card", - "client_documentation": "https://googleapis.dev/python/google-apps-card/latest", - "default_version": "v1", - "distribution_name": "google-apps-card", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-apps-card", - "name_pretty": "Google Apps Card Protos", - "product_documentation": "https://developers.google.com/chat", - "release_level": 
"preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Google Apps Card Protos", + "api_id": "card.googleapis.com", + "api_shortname": "card", + "client_documentation": "https://googleapis.dev/python/google-apps-card/latest", + "default_version": "v1", + "distribution_name": "google-apps-card", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-apps-card", + "name_pretty": "Google Apps Card Protos", + "product_documentation": "https://developers.google.com/chat", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-apps-chat/.repo-metadata.json b/packages/google-apps-chat/.repo-metadata.json index e45f67af577e..c4c685511023 100644 --- a/packages/google-apps-chat/.repo-metadata.json +++ b/packages/google-apps-chat/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "chat.googleapis.com", - "api_shortname": "chat", - "client_documentation": "https://googleapis.dev/python/google-apps-chat/latest", - "default_version": "v1", - "distribution_name": "google-apps-chat", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-apps-chat", - "name_pretty": "Chat API", - "product_documentation": "https://developers.google.com/chat/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google Chat API lets you build Chat apps to integrate your services\nwith Google Chat and manage Chat resources such as spaces, members, and\nmessages.", + "api_id": "chat.googleapis.com", + "api_shortname": "chat", + "client_documentation": "https://googleapis.dev/python/google-apps-chat/latest", + "default_version": "v1", + "distribution_name": "google-apps-chat", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-apps-chat", + "name_pretty": "Chat API", + "product_documentation": "https://developers.google.com/chat/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-apps-chat/README.rst b/packages/google-apps-chat/README.rst index 653a602e9a74..492388605771 100644 --- a/packages/google-apps-chat/README.rst +++ b/packages/google-apps-chat/README.rst @@ -3,7 +3,9 @@ Python Client for Chat API |preview| |pypi| |versions| -`Chat API`_: +`Chat API`_: The Google Chat API lets you build Chat apps to integrate your services +with Google Chat and manage Chat resources such as spaces, members, and +messages. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-apps-chat/docs/README.rst b/packages/google-apps-chat/docs/README.rst index 653a602e9a74..492388605771 100644 --- a/packages/google-apps-chat/docs/README.rst +++ b/packages/google-apps-chat/docs/README.rst @@ -3,7 +3,9 @@ Python Client for Chat API |preview| |pypi| |versions| -`Chat API`_: +`Chat API`_: The Google Chat API lets you build Chat apps to integrate your services +with Google Chat and manage Chat resources such as spaces, members, and +messages. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index 453874ad0db6..4f472be4866c 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -111,6 +111,21 @@ ListReactionsResponse, Reaction, ) +from google.apps.chat_v1.types.section import ( + CreateSectionRequest, + DeleteSectionRequest, + ListSectionItemsRequest, + ListSectionItemsResponse, + ListSectionsRequest, + ListSectionsResponse, + MoveSectionItemRequest, + MoveSectionItemResponse, + PositionSectionRequest, + PositionSectionResponse, + Section, + SectionItem, + UpdateSectionRequest, +) from google.apps.chat_v1.types.slash_command import SlashCommand from google.apps.chat_v1.types.space import ( CompleteImportSpaceRequest, @@ -230,6 +245,19 @@ "ListReactionsRequest", "ListReactionsResponse", "Reaction", + "CreateSectionRequest", + "DeleteSectionRequest", + "ListSectionItemsRequest", + "ListSectionItemsResponse", + "ListSectionsRequest", + "ListSectionsResponse", + "MoveSectionItemRequest", + "MoveSectionItemResponse", + "PositionSectionRequest", + "PositionSectionResponse", + "Section", + "SectionItem", + "UpdateSectionRequest", "SlashCommand", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index cd6270ac6cd6..8faf4e22479c 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -119,6 +119,21 @@ ListReactionsResponse, Reaction, ) +from .types.section import ( + CreateSectionRequest, + DeleteSectionRequest, + ListSectionItemsRequest, + ListSectionItemsResponse, + ListSectionsRequest, + ListSectionsResponse, + MoveSectionItemRequest, + MoveSectionItemResponse, + 
PositionSectionRequest, + PositionSectionResponse, + Section, + SectionItem, + UpdateSectionRequest, +) from .types.slash_command import SlashCommand from .types.space import ( CompleteImportSpaceRequest, @@ -270,6 +285,7 @@ def _get_version(dependency_name): "CreateMembershipRequest", "CreateMessageRequest", "CreateReactionRequest", + "CreateSectionRequest", "CreateSpaceRequest", "CustomEmoji", "CustomEmojiMetadata", @@ -277,6 +293,7 @@ def _get_version(dependency_name): "DeleteMembershipRequest", "DeleteMessageRequest", "DeleteReactionRequest", + "DeleteSectionRequest", "DeleteSpaceRequest", "DeletionMetadata", "Dialog", @@ -306,6 +323,10 @@ def _get_version(dependency_name): "ListMessagesResponse", "ListReactionsRequest", "ListReactionsResponse", + "ListSectionItemsRequest", + "ListSectionItemsResponse", + "ListSectionsRequest", + "ListSectionsResponse", "ListSpaceEventsRequest", "ListSpaceEventsResponse", "ListSpacesRequest", @@ -326,6 +347,10 @@ def _get_version(dependency_name): "MessageCreatedEventData", "MessageDeletedEventData", "MessageUpdatedEventData", + "MoveSectionItemRequest", + "MoveSectionItemResponse", + "PositionSectionRequest", + "PositionSectionResponse", "QuotedMessageMetadata", "QuotedMessageSnapshot", "Reaction", @@ -336,6 +361,8 @@ def _get_version(dependency_name): "RichLinkMetadata", "SearchSpacesRequest", "SearchSpacesResponse", + "Section", + "SectionItem", "SetUpSpaceRequest", "SlashCommand", "SlashCommandMetadata", @@ -349,6 +376,7 @@ def _get_version(dependency_name): "ThreadReadState", "UpdateMembershipRequest", "UpdateMessageRequest", + "UpdateSectionRequest", "UpdateSpaceNotificationSettingRequest", "UpdateSpaceReadStateRequest", "UpdateSpaceRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 5df6c90f6825..7cdaf7f91975 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ 
b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -35,6 +35,11 @@ "create_reaction" ] }, + "CreateSection": { + "methods": [ + "create_section" + ] + }, "CreateSpace": { "methods": [ "create_space" @@ -60,6 +65,11 @@ "delete_reaction" ] }, + "DeleteSection": { + "methods": [ + "delete_section" + ] + }, "DeleteSpace": { "methods": [ "delete_space" @@ -135,6 +145,16 @@ "list_reactions" ] }, + "ListSectionItems": { + "methods": [ + "list_section_items" + ] + }, + "ListSections": { + "methods": [ + "list_sections" + ] + }, "ListSpaceEvents": { "methods": [ "list_space_events" @@ -145,6 +165,16 @@ "list_spaces" ] }, + "MoveSectionItem": { + "methods": [ + "move_section_item" + ] + }, + "PositionSection": { + "methods": [ + "position_section" + ] + }, "SearchSpaces": { "methods": [ "search_spaces" @@ -165,6 +195,11 @@ "update_message" ] }, + "UpdateSection": { + "methods": [ + "update_section" + ] + }, "UpdateSpace": { "methods": [ "update_space" @@ -215,6 +250,11 @@ "create_reaction" ] }, + "CreateSection": { + "methods": [ + "create_section" + ] + }, "CreateSpace": { "methods": [ "create_space" @@ -240,6 +280,11 @@ "delete_reaction" ] }, + "DeleteSection": { + "methods": [ + "delete_section" + ] + }, "DeleteSpace": { "methods": [ "delete_space" @@ -315,6 +360,16 @@ "list_reactions" ] }, + "ListSectionItems": { + "methods": [ + "list_section_items" + ] + }, + "ListSections": { + "methods": [ + "list_sections" + ] + }, "ListSpaceEvents": { "methods": [ "list_space_events" @@ -325,6 +380,16 @@ "list_spaces" ] }, + "MoveSectionItem": { + "methods": [ + "move_section_item" + ] + }, + "PositionSection": { + "methods": [ + "position_section" + ] + }, "SearchSpaces": { "methods": [ "search_spaces" @@ -345,6 +410,11 @@ "update_message" ] }, + "UpdateSection": { + "methods": [ + "update_section" + ] + }, "UpdateSpace": { "methods": [ "update_space" @@ -395,6 +465,11 @@ "create_reaction" ] }, + "CreateSection": { + "methods": [ + "create_section" + ] + }, 
"CreateSpace": { "methods": [ "create_space" @@ -420,6 +495,11 @@ "delete_reaction" ] }, + "DeleteSection": { + "methods": [ + "delete_section" + ] + }, "DeleteSpace": { "methods": [ "delete_space" @@ -495,6 +575,16 @@ "list_reactions" ] }, + "ListSectionItems": { + "methods": [ + "list_section_items" + ] + }, + "ListSections": { + "methods": [ + "list_sections" + ] + }, "ListSpaceEvents": { "methods": [ "list_space_events" @@ -505,6 +595,16 @@ "list_spaces" ] }, + "MoveSectionItem": { + "methods": [ + "move_section_item" + ] + }, + "PositionSection": { + "methods": [ + "position_section" + ] + }, "SearchSpaces": { "methods": [ "search_spaces" @@ -525,6 +625,11 @@ "update_message" ] }, + "UpdateSection": { + "methods": [ + "update_section" + ] + }, "UpdateSpace": { "methods": [ "update_space" diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 1f16c255dca5..646b04bb490f 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -60,6 +60,7 @@ membership, message, reaction, + section, slash_command, space, space_event, @@ -72,6 +73,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -122,6 +124,10 @@ class ChatServiceAsyncClient: ) reaction_path = staticmethod(ChatServiceClient.reaction_path) parse_reaction_path = staticmethod(ChatServiceClient.parse_reaction_path) + section_path = staticmethod(ChatServiceClient.section_path) + parse_section_path = 
staticmethod(ChatServiceClient.parse_section_path) + section_item_path = staticmethod(ChatServiceClient.section_item_path) + parse_section_item_path = staticmethod(ChatServiceClient.parse_section_item_path) space_path = staticmethod(ChatServiceClient.space_path) parse_space_path = staticmethod(ChatServiceClient.parse_space_path) space_event_path = staticmethod(ChatServiceClient.space_event_path) @@ -142,6 +148,8 @@ class ChatServiceAsyncClient: parse_thread_read_state_path = staticmethod( ChatServiceClient.parse_thread_read_state_path ) + user_path = staticmethod(ChatServiceClient.user_path) + parse_user_path = staticmethod(ChatServiceClient.parse_user_path) common_billing_account_path = staticmethod( ChatServiceClient.common_billing_account_path ) @@ -575,9 +583,7 @@ async def list_messages( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with the authorization scope: - ``https://www.googleapis.com/auth/chat.app.messages.readonly``. @@ -1059,9 +1065,7 @@ async def get_message( that invoke the Chat app. - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` with `administrator - approval `__ - (available in `Developer - Preview `__). + approval `__. When using this authentication scope, this method returns details about a public message in a space. 
@@ -4835,14 +4839,14 @@ async def get_space_event( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -4994,14 +4998,14 @@ async def list_space_events( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -5499,6 +5503,948 @@ async def sample_update_space_notification_setting(): # Done; return the response. return response + async def create_section( + self, + request: Optional[Union[gc_section.CreateSectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + section: Optional[gc_section.Section] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gc_section.Section: + r"""Creates a section in Google Chat. Sections help users group + conversations and customize the list of spaces displayed in Chat + navigation panel. Only sections of type ``CUSTOM_SECTION`` can + be created. For details, see `Create and organize sections in + Google + Chat `__. 
+ + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_create_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.CreateSectionRequest( + parent="parent_value", + section=section, + ) + + # Make the request + response = await client.create_section(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.CreateSectionRequest, dict]]): + The request object. Request message for creating a + section. + parent (:class:`str`): + Required. The parent resource name where the section is + created. + + Format: ``users/{user}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + section (:class:`google.apps.chat_v1.types.Section`): + Required. The section to create. + This corresponds to the ``section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.Section: + Represents a [section](https://support.google.com/chat/answer/16059854) in + Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. **System Sections:** These are predefined sections + managed by Google Chat. Their resource names are + fixed, and they cannot be created, deleted, or have + their display_name modified. Examples include: \* + users/{user}/sections/default-direct-messages \* + users/{user}/sections/default-spaces \* + users/{user}/sections/default-apps + + 2. **Custom Sections:** These are sections created + and managed by the user. Creating a custom section + using CreateSection **requires** a display_name. + Custom sections can be updated using UpdateSection + and deleted using DeleteSection. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, section] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gc_section.CreateSectionRequest): + request = gc_section.CreateSectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if section is not None: + request.section = section + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_section + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_section( + self, + request: Optional[Union[section.DeleteSectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a section of type ``CUSTOM_SECTION``. + + If the section contains items, such as spaces, the items are + moved to Google Chat's default sections and are not deleted. + + For details, see `Create and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_delete_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.DeleteSectionRequest( + name="name_value", + ) + + # Make the request + await client.delete_section(request=request) + + Args: + request (Optional[Union[google.apps.chat_v1.types.DeleteSectionRequest, dict]]): + The request object. Request message for deleting a section. `Developer + Preview `__. + name (:class:`str`): + Required. The name of the section to delete. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, section.DeleteSectionRequest): + request = section.DeleteSectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_section + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_section( + self, + request: Optional[Union[gc_section.UpdateSectionRequest, dict]] = None, + *, + section: Optional[gc_section.Section] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gc_section.Section: + r"""Updates a section. Only sections of type ``CUSTOM_SECTION`` can + be updated. For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_update_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.UpdateSectionRequest( + section=section, + ) + + # Make the request + response = await client.update_section(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.UpdateSectionRequest, dict]]): + The request object. Request message for updating a + section. + section (:class:`google.apps.chat_v1.types.Section`): + Required. The section to update. + This corresponds to the ``section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The mask to specify which fields to update. + + Currently supported field paths: + + - ``display_name`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.Section: + Represents a [section](https://support.google.com/chat/answer/16059854) in + Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. 
**System Sections:** These are predefined sections + managed by Google Chat. Their resource names are + fixed, and they cannot be created, deleted, or have + their display_name modified. Examples include: \* + users/{user}/sections/default-direct-messages \* + users/{user}/sections/default-spaces \* + users/{user}/sections/default-apps + + 2. **Custom Sections:** These are sections created + and managed by the user. Creating a custom section + using CreateSection **requires** a display_name. + Custom sections can be updated using UpdateSection + and deleted using DeleteSection. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [section, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gc_section.UpdateSectionRequest): + request = gc_section.UpdateSectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if section is not None: + request.section = section + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_section + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("section.name", request.section.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sections( + self, + request: Optional[Union[section.ListSectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSectionsAsyncPager: + r"""Lists sections available to the Chat user. Sections help users + group their conversations and customize the list of spaces + displayed in Chat navigation panel. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_list_sections(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.ListSectionsRequest, dict]]): + The request object. Request message for listing sections. + parent (:class:`str`): + Required. The parent, which is the user resource name + that owns this collection of sections. Only supports + listing sections for the calling user. To refer to the + calling user, set one of the following: + + - The ``me`` alias. For example, ``users/me``. + + - Their Workspace email address. For example, + ``users/user@example.com``. + + - Their user id. For example, ``users/123456789``. + + Format: ``users/{user}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.ListSectionsAsyncPager: + Response message for listing + sections. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.ListSectionsRequest): + request = section.ListSectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sections + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSectionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def position_section( + self, + request: Optional[Union[section.PositionSectionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.PositionSectionResponse: + r"""Changes the sort order of a section. For details, see `Create + and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_position_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.PositionSectionRequest( + sort_order=1091, + name="name_value", + ) + + # Make the request + response = await client.position_section(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.PositionSectionRequest, dict]]): + The request object. Request message for positioning a + section. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.PositionSectionResponse: + Response message for positioning a + section. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.PositionSectionRequest): + request = section.PositionSectionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.position_section + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_section_items( + self, + request: Optional[Union[section.ListSectionItemsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSectionItemsAsyncPager: + r"""Lists items in a section. + + Only spaces can be section items. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_list_section_items(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionItemsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_section_items(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.ListSectionItemsRequest, dict]]): + The request object. Request message for listing section + items. + parent (:class:`str`): + Required. The parent, which is the section resource name + that owns this collection of section items. Only + supports listing section items for the calling user. + + When you're filtering by space, use the wildcard ``-`` + to search across all sections. For example, + ``users/{user}/sections/-``. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.apps.chat_v1.services.chat_service.pagers.ListSectionItemsAsyncPager: + Response message for listing section + items. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.ListSectionItemsRequest): + request = section.ListSectionItemsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_section_items + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListSectionItemsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def move_section_item( + self, + request: Optional[Union[section.MoveSectionItemRequest, dict]] = None, + *, + name: Optional[str] = None, + target_section: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.MoveSectionItemResponse: + r"""Moves an item from one section to another. For example, if a + section contains spaces, this method can be used to move a space + to a different section. For details, see `Create and organize + sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_move_section_item(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + # Make the request + response = await client.move_section_item(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.MoveSectionItemRequest, dict]]): + The request object. Request message for moving a section + item across sections. 
+ name (:class:`str`): + Required. The resource name of the section item to move. + + Format: ``users/{user}/sections/{section}/items/{item}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_section (:class:`str`): + Required. The resource name of the section to move the + section item to. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``target_section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.MoveSectionItemResponse: + Response message for moving a section + item. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, target_section] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.MoveSectionItemRequest): + request = section.MoveSectionItemRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if target_section is not None: + request.target_section = target_section + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.move_section_item + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "ChatServiceAsyncClient": return self diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 3bc454149959..147fe44be01c 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -77,6 +77,7 @@ membership, message, reaction, + section, slash_command, space, space_event, @@ -89,6 +90,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -374,6 +376,44 @@ def parse_reaction_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def section_path( + user: str, + section: str, + ) -> str: + """Returns a fully-qualified section string.""" + return 
"users/{user}/sections/{section}".format( + user=user, + section=section, + ) + + @staticmethod + def parse_section_path(path: str) -> Dict[str, str]: + """Parses a section path into its component segments.""" + m = re.match(r"^users/(?P<user>.+?)/sections/(?P<section>
.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def section_item_path( + user: str, + section: str, + item: str, + ) -> str: + """Returns a fully-qualified section_item string.""" + return "users/{user}/sections/{section}/items/{item}".format( + user=user, + section=section, + item=item, + ) + + @staticmethod + def parse_section_item_path(path: str) -> Dict[str, str]: + """Parses a section_item path into its component segments.""" + m = re.match( + r"^users/(?P<user>.+?)/sections/(?P<section>
.+?)/items/(?P<item>.+?)$", path + ) + return m.groupdict() if m else {} + @staticmethod def space_path( space: str, @@ -484,6 +524,21 @@ def parse_thread_read_state_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def user_path( + user: str, + ) -> str: + """Returns a fully-qualified user string.""" + return "users/{user}".format( + user=user, + ) + + @staticmethod + def parse_user_path(path: str) -> Dict[str, str]: + """Parses a user path into its component segments.""" + m = re.match(r"^users/(?P<user>.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -1176,9 +1231,7 @@ def list_messages( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with the authorization scope: - ``https://www.googleapis.com/auth/chat.app.messages.readonly``. @@ -1651,9 +1704,7 @@ def get_message( that invoke the Chat app. - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` with `administrator - approval `__ - (available in `Developer - Preview `__). + approval `__. When using this authentication scope, this method returns details about a public message in a space.
@@ -5353,14 +5404,14 @@ def get_space_event( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -5509,14 +5560,14 @@ def list_space_events( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -6009,6 +6060,928 @@ def sample_update_space_notification_setting(): # Done; return the response. return response + def create_section( + self, + request: Optional[Union[gc_section.CreateSectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + section: Optional[gc_section.Section] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gc_section.Section: + r"""Creates a section in Google Chat. Sections help users group + conversations and customize the list of spaces displayed in Chat + navigation panel. Only sections of type ``CUSTOM_SECTION`` can + be created. For details, see `Create and organize sections in + Google + Chat `__. 
+ + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_create_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.CreateSectionRequest( + parent="parent_value", + section=section, + ) + + # Make the request + response = client.create_section(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.chat_v1.types.CreateSectionRequest, dict]): + The request object. Request message for creating a + section. + parent (str): + Required. The parent resource name where the section is + created. + + Format: ``users/{user}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + section (google.apps.chat_v1.types.Section): + Required. The section to create. + This corresponds to the ``section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.Section: + Represents a [section](https://support.google.com/chat/answer/16059854) in + Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. **System Sections:** These are predefined sections + managed by Google Chat. Their resource names are + fixed, and they cannot be created, deleted, or have + their display_name modified. Examples include: \* + users/{user}/sections/default-direct-messages \* + users/{user}/sections/default-spaces \* + users/{user}/sections/default-apps + + 2. **Custom Sections:** These are sections created + and managed by the user. Creating a custom section + using CreateSection **requires** a display_name. + Custom sections can be updated using UpdateSection + and deleted using DeleteSection. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, section] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gc_section.CreateSectionRequest): + request = gc_section.CreateSectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if section is not None: + request.section = section + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_section] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_section( + self, + request: Optional[Union[section.DeleteSectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a section of type ``CUSTOM_SECTION``. + + If the section contains items, such as spaces, the items are + moved to Google Chat's default sections and are not deleted. + + For details, see `Create and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_delete_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.DeleteSectionRequest( + name="name_value", + ) + + # Make the request + client.delete_section(request=request) + + Args: + request (Union[google.apps.chat_v1.types.DeleteSectionRequest, dict]): + The request object. Request message for deleting a section. `Developer + Preview `__. + name (str): + Required. The name of the section to delete. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, section.DeleteSectionRequest): + request = section.DeleteSectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_section] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_section( + self, + request: Optional[Union[gc_section.UpdateSectionRequest, dict]] = None, + *, + section: Optional[gc_section.Section] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gc_section.Section: + r"""Updates a section. Only sections of type ``CUSTOM_SECTION`` can + be updated. For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_update_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.UpdateSectionRequest( + section=section, + ) + + # Make the request + response = client.update_section(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.chat_v1.types.UpdateSectionRequest, dict]): + The request object. Request message for updating a + section. + section (google.apps.chat_v1.types.Section): + Required. The section to update. + This corresponds to the ``section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to specify which fields to update. + + Currently supported field paths: + + - ``display_name`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.Section: + Represents a [section](https://support.google.com/chat/answer/16059854) in + Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. **System Sections:** These are predefined sections + managed by Google Chat. 
Their resource names are + fixed, and they cannot be created, deleted, or have + their display_name modified. Examples include: \* + users/{user}/sections/default-direct-messages \* + users/{user}/sections/default-spaces \* + users/{user}/sections/default-apps + + 2. **Custom Sections:** These are sections created + and managed by the user. Creating a custom section + using CreateSection **requires** a display_name. + Custom sections can be updated using UpdateSection + and deleted using DeleteSection. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [section, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gc_section.UpdateSectionRequest): + request = gc_section.UpdateSectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if section is not None: + request.section = section + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_section] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("section.name", request.section.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sections( + self, + request: Optional[Union[section.ListSectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSectionsPager: + r"""Lists sections available to the Chat user. Sections help users + group their conversations and customize the list of spaces + displayed in Chat navigation panel. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_list_sections(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.ListSectionsRequest, dict]): + The request object. Request message for listing sections. + parent (str): + Required. The parent, which is the user resource name + that owns this collection of sections. 
Only supports + listing sections for the calling user. To refer to the + calling user, set one of the following: + + - The ``me`` alias. For example, ``users/me``. + + - Their Workspace email address. For example, + ``users/user@example.com``. + + - Their user id. For example, ``users/123456789``. + + Format: ``users/{user}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.ListSectionsPager: + Response message for listing + sections. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.ListSectionsRequest): + request = section.ListSectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSectionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def position_section( + self, + request: Optional[Union[section.PositionSectionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.PositionSectionResponse: + r"""Changes the sort order of a section. For details, see `Create + and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_position_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.PositionSectionRequest( + sort_order=1091, + name="name_value", + ) + + # Make the request + response = client.position_section(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.chat_v1.types.PositionSectionRequest, dict]): + The request object. Request message for positioning a + section. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.PositionSectionResponse: + Response message for positioning a + section. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.PositionSectionRequest): + request = section.PositionSectionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.position_section] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_section_items( + self, + request: Optional[Union[section.ListSectionItemsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSectionItemsPager: + r"""Lists items in a section. + + Only spaces can be section items. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_list_section_items(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionItemsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_section_items(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.ListSectionItemsRequest, dict]): + The request object. Request message for listing section + items. + parent (str): + Required. The parent, which is the section resource name + that owns this collection of section items. 
Only + supports listing section items for the calling user. + + When you're filtering by space, use the wildcard ``-`` + to search across all sections. For example, + ``users/{user}/sections/-``. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.ListSectionItemsPager: + Response message for listing section + items. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.ListSectionItemsRequest): + request = section.ListSectionItemsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_section_items] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSectionItemsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move_section_item( + self, + request: Optional[Union[section.MoveSectionItemRequest, dict]] = None, + *, + name: Optional[str] = None, + target_section: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.MoveSectionItemResponse: + r"""Moves an item from one section to another. For example, if a + section contains spaces, this method can be used to move a space + to a different section. For details, see `Create and organize + sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_move_section_item(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + # Make the request + response = client.move_section_item(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.chat_v1.types.MoveSectionItemRequest, dict]): + The request object. Request message for moving a section + item across sections. + name (str): + Required. The resource name of the section item to move. + + Format: ``users/{user}/sections/{section}/items/{item}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + target_section (str): + Required. The resource name of the section to move the + section item to. + + Format: ``users/{user}/sections/{section}`` + + This corresponds to the ``target_section`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.types.MoveSectionItemResponse: + Response message for moving a section + item. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, target_section] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, section.MoveSectionItemRequest): + request = section.MoveSectionItemRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if target_section is not None: + request.target_section = target_section + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move_section_item] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ChatServiceClient": return self diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index 2fa61873496a..ff5b2d48045d 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -38,7 +38,14 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.apps.chat_v1.types import membership, message, reaction, space, space_event +from google.apps.chat_v1.types import ( + membership, + message, + reaction, + section, + space, + space_event, +) class ListMessagesPager: @@ -1131,3 +1138,315 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSectionsPager: + """A pager for iterating through ``list_sections`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.ListSectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSections`` requests and continue to iterate + through the ``sections`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.ListSectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., section.ListSectionsResponse], + request: section.ListSectionsRequest, + response: section.ListSectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.ListSectionsRequest): + The initial request object. + response (google.apps.chat_v1.types.ListSectionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = section.ListSectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[section.ListSectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[section.Section]: + for page in self.pages: + yield from page.sections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSectionsAsyncPager: + """A pager for iterating through ``list_sections`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.ListSectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSections`` requests and continue to iterate + through the ``sections`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.ListSectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[section.ListSectionsResponse]], + request: section.ListSectionsRequest, + response: section.ListSectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.ListSectionsRequest): + The initial request object. + response (google.apps.chat_v1.types.ListSectionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = section.ListSectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[section.ListSectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[section.Section]: + async def async_generator(): + async for page in self.pages: + for response in page.sections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSectionItemsPager: + """A pager for iterating through ``list_section_items`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.ListSectionItemsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``section_items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListSectionItems`` requests and continue to iterate + through the ``section_items`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.ListSectionItemsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., section.ListSectionItemsResponse], + request: section.ListSectionItemsRequest, + response: section.ListSectionItemsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.ListSectionItemsRequest): + The initial request object. + response (google.apps.chat_v1.types.ListSectionItemsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = section.ListSectionItemsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[section.ListSectionItemsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[section.SectionItem]: + for page in self.pages: + yield from page.section_items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSectionItemsAsyncPager: + """A pager for iterating through ``list_section_items`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.ListSectionItemsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``section_items`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSectionItems`` requests and continue to iterate + through the ``section_items`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.ListSectionItemsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[section.ListSectionItemsResponse]], + request: section.ListSectionItemsRequest, + response: section.ListSectionItemsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.ListSectionItemsRequest): + The initial request object. + response (google.apps.chat_v1.types.ListSectionItemsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = section.ListSectionItemsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[section.ListSectionItemsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[section.SectionItem]: + async def async_generator(): + async for page in self.pages: + for response in page.section_items: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index 124a8e57741f..1726e664dcaa 100644 --- 
a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -32,6 +32,7 @@ membership, message, reaction, + section, space, space_event, space_notification_setting, @@ -42,6 +43,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -67,9 +69,11 @@ class ChatServiceTransport(abc.ABC): "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -89,6 +93,8 @@ class ChatServiceTransport(abc.ABC): "https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ) @@ -683,6 +689,104 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.create_section: gapic_v1.method.wrap_method( + 
self.create_section, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.delete_section: gapic_v1.method.wrap_method( + self.delete_section, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.update_section: gapic_v1.method.wrap_method( + self.update_section, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sections: gapic_v1.method.wrap_method( + self.list_sections, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.position_section: gapic_v1.method.wrap_method( + self.position_section, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_section_items: gapic_v1.method.wrap_method( + self.list_section_items, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.move_section_item: gapic_v1.method.wrap_method( + self.move_section_item, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), } def close(self): @@ -1031,6 +1135,76 @@ def update_space_notification_setting( ]: raise NotImplementedError() + @property + def create_section( + self, + ) -> Callable[ + [gc_section.CreateSectionRequest], + Union[gc_section.Section, Awaitable[gc_section.Section]], + ]: + raise NotImplementedError() + + @property + def delete_section( + self, + ) -> Callable[ + [section.DeleteSectionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_section( + self, + ) -> Callable[ + [gc_section.UpdateSectionRequest], + Union[gc_section.Section, Awaitable[gc_section.Section]], + ]: + raise NotImplementedError() + + @property + def list_sections( + self, + ) -> Callable[ + [section.ListSectionsRequest], + Union[section.ListSectionsResponse, Awaitable[section.ListSectionsResponse]], + ]: + raise NotImplementedError() + + @property + def position_section( + self, + ) -> Callable[ + [section.PositionSectionRequest], + Union[ + section.PositionSectionResponse, Awaitable[section.PositionSectionResponse] + ], + ]: + raise NotImplementedError() + + @property + def list_section_items( + self, + ) -> Callable[ + [section.ListSectionItemsRequest], + Union[ + section.ListSectionItemsResponse, + Awaitable[section.ListSectionItemsResponse], + ], + ]: + raise NotImplementedError() + + @property + def move_section_item( + self, + ) -> Callable[ + [section.MoveSectionItemRequest], + Union[ + section.MoveSectionItemResponse, Awaitable[section.MoveSectionItemResponse] + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py 
b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index f3d9eecdf896..a5ab6a47b320 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -34,6 +34,7 @@ membership, message, reaction, + section, space, space_event, space_notification_setting, @@ -44,6 +45,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -444,9 +446,7 @@ def list_messages( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with the authorization scope: - ``https://www.googleapis.com/auth/chat.app.messages.readonly``. @@ -624,9 +624,7 @@ def get_message(self) -> Callable[[message.GetMessageRequest], message.Message]: that invoke the Chat app. - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` with `administrator - approval `__ - (available in `Developer - Preview `__). + approval `__. When using this authentication scope, this method returns details about a public message in a space. 
@@ -1974,14 +1972,14 @@ def get_space_event( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -2047,14 +2045,14 @@ def list_space_events( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -2174,6 +2172,267 @@ def update_space_notification_setting( ) return self._stubs["update_space_notification_setting"] + @property + def create_section( + self, + ) -> Callable[[gc_section.CreateSectionRequest], gc_section.Section]: + r"""Return a callable for the create section method over gRPC. + + Creates a section in Google Chat. Sections help users group + conversations and customize the list of spaces displayed in Chat + navigation panel. Only sections of type ``CUSTOM_SECTION`` can + be created. For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.CreateSectionRequest], + ~.Section]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_section" not in self._stubs: + self._stubs["create_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/CreateSection", + request_serializer=gc_section.CreateSectionRequest.serialize, + response_deserializer=gc_section.Section.deserialize, + ) + return self._stubs["create_section"] + + @property + def delete_section( + self, + ) -> Callable[[section.DeleteSectionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete section method over gRPC. + + Deletes a section of type ``CUSTOM_SECTION``. + + If the section contains items, such as spaces, the items are + moved to Google Chat's default sections and are not deleted. + + For details, see `Create and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.DeleteSectionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_section" not in self._stubs: + self._stubs["delete_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/DeleteSection", + request_serializer=section.DeleteSectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_section"] + + @property + def update_section( + self, + ) -> Callable[[gc_section.UpdateSectionRequest], gc_section.Section]: + r"""Return a callable for the update section method over gRPC. + + Updates a section. Only sections of type ``CUSTOM_SECTION`` can + be updated. 
For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.UpdateSectionRequest], + ~.Section]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_section" not in self._stubs: + self._stubs["update_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/UpdateSection", + request_serializer=gc_section.UpdateSectionRequest.serialize, + response_deserializer=gc_section.Section.deserialize, + ) + return self._stubs["update_section"] + + @property + def list_sections( + self, + ) -> Callable[[section.ListSectionsRequest], section.ListSectionsResponse]: + r"""Return a callable for the list sections method over gRPC. + + Lists sections available to the Chat user. Sections help users + group their conversations and customize the list of spaces + displayed in Chat navigation panel. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + Returns: + Callable[[~.ListSectionsRequest], + ~.ListSectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sections" not in self._stubs: + self._stubs["list_sections"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/ListSections", + request_serializer=section.ListSectionsRequest.serialize, + response_deserializer=section.ListSectionsResponse.deserialize, + ) + return self._stubs["list_sections"] + + @property + def position_section( + self, + ) -> Callable[[section.PositionSectionRequest], section.PositionSectionResponse]: + r"""Return a callable for the position section method over gRPC. + + Changes the sort order of a section. For details, see `Create + and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.PositionSectionRequest], + ~.PositionSectionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "position_section" not in self._stubs: + self._stubs["position_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/PositionSection", + request_serializer=section.PositionSectionRequest.serialize, + response_deserializer=section.PositionSectionResponse.deserialize, + ) + return self._stubs["position_section"] + + @property + def list_section_items( + self, + ) -> Callable[[section.ListSectionItemsRequest], section.ListSectionItemsResponse]: + r"""Return a callable for the list section items method over gRPC. + + Lists items in a section. + + Only spaces can be section items. For details, see `Create and + organize sections in Google + Chat `__. 
+ + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + Returns: + Callable[[~.ListSectionItemsRequest], + ~.ListSectionItemsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_section_items" not in self._stubs: + self._stubs["list_section_items"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/ListSectionItems", + request_serializer=section.ListSectionItemsRequest.serialize, + response_deserializer=section.ListSectionItemsResponse.deserialize, + ) + return self._stubs["list_section_items"] + + @property + def move_section_item( + self, + ) -> Callable[[section.MoveSectionItemRequest], section.MoveSectionItemResponse]: + r"""Return a callable for the move section item method over gRPC. + + Moves an item from one section to another. For example, if a + section contains spaces, this method can be used to move a space + to a different section. For details, see `Create and organize + sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.MoveSectionItemRequest], + ~.MoveSectionItemResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "move_section_item" not in self._stubs: + self._stubs["move_section_item"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/MoveSectionItem", + request_serializer=section.MoveSectionItemRequest.serialize, + response_deserializer=section.MoveSectionItemResponse.deserialize, + ) + return self._stubs["move_section_item"] + def close(self): self._logged_channel.close() diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index 1dde1db5c5f3..50fac72a710f 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -37,6 +37,7 @@ membership, message, reaction, + section, space, space_event, space_notification_setting, @@ -47,6 +48,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -454,9 +456,7 @@ def list_messages( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with the authorization scope: - ``https://www.googleapis.com/auth/chat.app.messages.readonly``. @@ -637,9 +637,7 @@ def get_message( that invoke the Chat app. - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` with `administrator - approval `__ - (available in `Developer - Preview `__). + approval `__. When using this authentication scope, this method returns details about a public message in a space. 
@@ -2008,14 +2006,14 @@ def get_space_event( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -2082,14 +2080,14 @@ def list_space_events( - `App authentication `__ with `administrator - approval `__ in - `Developer - Preview `__ + approval `__ with one of the following authorization scopes: - ``https://www.googleapis.com/auth/chat.app.spaces`` + - ``https://www.googleapis.com/auth/chat.app.spaces.readonly`` - ``https://www.googleapis.com/auth/chat.app.messages.readonly`` - ``https://www.googleapis.com/auth/chat.app.memberships`` + - ``https://www.googleapis.com/auth/chat.app.memberships.readonly`` - `User authentication `__ @@ -2209,6 +2207,275 @@ def update_space_notification_setting( ) return self._stubs["update_space_notification_setting"] + @property + def create_section( + self, + ) -> Callable[[gc_section.CreateSectionRequest], Awaitable[gc_section.Section]]: + r"""Return a callable for the create section method over gRPC. + + Creates a section in Google Chat. Sections help users group + conversations and customize the list of spaces displayed in Chat + navigation panel. Only sections of type ``CUSTOM_SECTION`` can + be created. For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.CreateSectionRequest], + Awaitable[~.Section]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_section" not in self._stubs: + self._stubs["create_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/CreateSection", + request_serializer=gc_section.CreateSectionRequest.serialize, + response_deserializer=gc_section.Section.deserialize, + ) + return self._stubs["create_section"] + + @property + def delete_section( + self, + ) -> Callable[[section.DeleteSectionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete section method over gRPC. + + Deletes a section of type ``CUSTOM_SECTION``. + + If the section contains items, such as spaces, the items are + moved to Google Chat's default sections and are not deleted. + + For details, see `Create and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.DeleteSectionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_section" not in self._stubs: + self._stubs["delete_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/DeleteSection", + request_serializer=section.DeleteSectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_section"] + + @property + def update_section( + self, + ) -> Callable[[gc_section.UpdateSectionRequest], Awaitable[gc_section.Section]]: + r"""Return a callable for the update section method over gRPC. + + Updates a section. 
Only sections of type ``CUSTOM_SECTION`` can + be updated. For details, see `Create and organize sections in + Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.UpdateSectionRequest], + Awaitable[~.Section]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_section" not in self._stubs: + self._stubs["update_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/UpdateSection", + request_serializer=gc_section.UpdateSectionRequest.serialize, + response_deserializer=gc_section.Section.deserialize, + ) + return self._stubs["update_section"] + + @property + def list_sections( + self, + ) -> Callable[ + [section.ListSectionsRequest], Awaitable[section.ListSectionsResponse] + ]: + r"""Return a callable for the list sections method over gRPC. + + Lists sections available to the Chat user. Sections help users + group their conversations and customize the list of spaces + displayed in Chat navigation panel. For details, see `Create and + organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + Returns: + Callable[[~.ListSectionsRequest], + Awaitable[~.ListSectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_sections" not in self._stubs: + self._stubs["list_sections"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/ListSections", + request_serializer=section.ListSectionsRequest.serialize, + response_deserializer=section.ListSectionsResponse.deserialize, + ) + return self._stubs["list_sections"] + + @property + def position_section( + self, + ) -> Callable[ + [section.PositionSectionRequest], Awaitable[section.PositionSectionResponse] + ]: + r"""Return a callable for the position section method over gRPC. + + Changes the sort order of a section. For details, see `Create + and organize sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.PositionSectionRequest], + Awaitable[~.PositionSectionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "position_section" not in self._stubs: + self._stubs["position_section"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/PositionSection", + request_serializer=section.PositionSectionRequest.serialize, + response_deserializer=section.PositionSectionResponse.deserialize, + ) + return self._stubs["position_section"] + + @property + def list_section_items( + self, + ) -> Callable[ + [section.ListSectionItemsRequest], Awaitable[section.ListSectionItemsResponse] + ]: + r"""Return a callable for the list section items method over gRPC. + + Lists items in a section. + + Only spaces can be section items. For details, see `Create and + organize sections in Google + Chat `__. 
+ + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + - ``https://www.googleapis.com/auth/chat.users.sections.readonly`` + + Returns: + Callable[[~.ListSectionItemsRequest], + Awaitable[~.ListSectionItemsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_section_items" not in self._stubs: + self._stubs["list_section_items"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/ListSectionItems", + request_serializer=section.ListSectionItemsRequest.serialize, + response_deserializer=section.ListSectionItemsResponse.deserialize, + ) + return self._stubs["list_section_items"] + + @property + def move_section_item( + self, + ) -> Callable[ + [section.MoveSectionItemRequest], Awaitable[section.MoveSectionItemResponse] + ]: + r"""Return a callable for the move section item method over gRPC. + + Moves an item from one section to another. For example, if a + section contains spaces, this method can be used to move a space + to a different section. For details, see `Create and organize + sections in Google + Chat `__. + + Requires `user + authentication `__ + with the `authorization + scope `__: + + - ``https://www.googleapis.com/auth/chat.users.sections`` + + Returns: + Callable[[~.MoveSectionItemRequest], + Awaitable[~.MoveSectionItemResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "move_section_item" not in self._stubs: + self._stubs["move_section_item"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/MoveSectionItem", + request_serializer=section.MoveSectionItemRequest.serialize, + response_deserializer=section.MoveSectionItemResponse.deserialize, + ) + return self._stubs["move_section_item"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -2702,6 +2969,104 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.create_section: self._wrap_method( + self.create_section, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.delete_section: self._wrap_method( + self.delete_section, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.update_section: self._wrap_method( + self.update_section, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sections: self._wrap_method( + self.list_sections, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.position_section: self._wrap_method( + self.position_section, + default_retry=retries.AsyncRetry( + initial=1.0, + 
maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_section_items: self._wrap_method( + self.list_section_items, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.move_section_item: self._wrap_method( + self.move_section_item, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index a0b5a6ea4756..a8cd7e10e435 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -34,6 +34,7 @@ membership, message, reaction, + section, space, space_event, space_notification_setting, @@ -44,6 +45,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -132,6 +134,14 @@ def post_create_reaction(self, response): logging.log(f"Received response: {response}") return response + def pre_create_section(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_create_section(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -160,6 +170,10 @@ def pre_delete_reaction(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_section(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_delete_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -276,6 +290,22 @@ def post_list_reactions(self, response): logging.log(f"Received response: {response}") return response + def pre_list_section_items(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_section_items(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sections(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_space_events(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -292,6 +322,22 @@ def post_list_spaces(self, response): logging.log(f"Received response: {response}") return response + def pre_move_section_item(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move_section_item(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_position_section(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_position_section(self, response): + logging.log(f"Received response: {response}") + 
return response + def pre_search_spaces(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -324,6 +370,14 @@ def post_update_message(self, response): logging.log(f"Received response: {response}") return response + def pre_update_section(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_section(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -602,6 +656,52 @@ def post_create_reaction_with_metadata( """ return response, metadata + def pre_create_section( + self, + request: gc_section.CreateSectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gc_section.CreateSectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_section + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_create_section(self, response: gc_section.Section) -> gc_section.Section: + """Post-rpc interceptor for create_section + + DEPRECATED. Please use the `post_create_section_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_create_section` interceptor runs + before the `post_create_section_with_metadata` interceptor. 
+ """ + return response + + def post_create_section_with_metadata( + self, + response: gc_section.Section, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_section.Section, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_section + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_create_section_with_metadata` + interceptor in new development instead of the `post_create_section` interceptor. + When both interceptors are used, this `post_create_section_with_metadata` interceptor runs after the + `post_create_section` interceptor. The (possibly modified) response returned by + `post_create_section` will be passed to + `post_create_section_with_metadata`. + """ + return response, metadata + def pre_create_space( self, request: gc_space.CreateSpaceRequest, @@ -732,6 +832,18 @@ def pre_delete_reaction( """ return request, metadata + def pre_delete_section( + self, + request: section.DeleteSectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[section.DeleteSectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_section + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + def pre_delete_space( self, request: space.DeleteSpaceRequest, @@ -1408,6 +1520,102 @@ def post_list_reactions_with_metadata( """ return response, metadata + def pre_list_section_items( + self, + request: section.ListSectionItemsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + section.ListSectionItemsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_section_items + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. 
+ """ + return request, metadata + + def post_list_section_items( + self, response: section.ListSectionItemsResponse + ) -> section.ListSectionItemsResponse: + """Post-rpc interceptor for list_section_items + + DEPRECATED. Please use the `post_list_section_items_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_list_section_items` interceptor runs + before the `post_list_section_items_with_metadata` interceptor. + """ + return response + + def post_list_section_items_with_metadata( + self, + response: section.ListSectionItemsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + section.ListSectionItemsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_section_items + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_section_items_with_metadata` + interceptor in new development instead of the `post_list_section_items` interceptor. + When both interceptors are used, this `post_list_section_items_with_metadata` interceptor runs after the + `post_list_section_items` interceptor. The (possibly modified) response returned by + `post_list_section_items` will be passed to + `post_list_section_items_with_metadata`. + """ + return response, metadata + + def pre_list_sections( + self, + request: section.ListSectionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[section.ListSectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_sections + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. 
+ """ + return request, metadata + + def post_list_sections( + self, response: section.ListSectionsResponse + ) -> section.ListSectionsResponse: + """Post-rpc interceptor for list_sections + + DEPRECATED. Please use the `post_list_sections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_list_sections` interceptor runs + before the `post_list_sections_with_metadata` interceptor. + """ + return response + + def post_list_sections_with_metadata( + self, + response: section.ListSectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[section.ListSectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_sections_with_metadata` + interceptor in new development instead of the `post_list_sections` interceptor. + When both interceptors are used, this `post_list_sections_with_metadata` interceptor runs after the + `post_list_sections` interceptor. The (possibly modified) response returned by + `post_list_sections` will be passed to + `post_list_sections_with_metadata`. + """ + return response, metadata + def pre_list_space_events( self, request: space_event.ListSpaceEventsRequest, @@ -1504,6 +1712,102 @@ def post_list_spaces_with_metadata( """ return response, metadata + def pre_move_section_item( + self, + request: section.MoveSectionItemRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[section.MoveSectionItemRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for move_section_item + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. 
+ """ + return request, metadata + + def post_move_section_item( + self, response: section.MoveSectionItemResponse + ) -> section.MoveSectionItemResponse: + """Post-rpc interceptor for move_section_item + + DEPRECATED. Please use the `post_move_section_item_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_move_section_item` interceptor runs + before the `post_move_section_item_with_metadata` interceptor. + """ + return response + + def post_move_section_item_with_metadata( + self, + response: section.MoveSectionItemResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + section.MoveSectionItemResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for move_section_item + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_move_section_item_with_metadata` + interceptor in new development instead of the `post_move_section_item` interceptor. + When both interceptors are used, this `post_move_section_item_with_metadata` interceptor runs after the + `post_move_section_item` interceptor. The (possibly modified) response returned by + `post_move_section_item` will be passed to + `post_move_section_item_with_metadata`. + """ + return response, metadata + + def pre_position_section( + self, + request: section.PositionSectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[section.PositionSectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for position_section + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. 
+ """ + return request, metadata + + def post_position_section( + self, response: section.PositionSectionResponse + ) -> section.PositionSectionResponse: + """Post-rpc interceptor for position_section + + DEPRECATED. Please use the `post_position_section_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_position_section` interceptor runs + before the `post_position_section_with_metadata` interceptor. + """ + return response + + def post_position_section_with_metadata( + self, + response: section.PositionSectionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + section.PositionSectionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for position_section + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_position_section_with_metadata` + interceptor in new development instead of the `post_position_section` interceptor. + When both interceptors are used, this `post_position_section_with_metadata` interceptor runs after the + `post_position_section` interceptor. The (possibly modified) response returned by + `post_position_section` will be passed to + `post_position_section_with_metadata`. 
+ """ + return response, metadata + def pre_search_spaces( self, request: space.SearchSpacesRequest, @@ -1686,6 +1990,52 @@ def post_update_message_with_metadata( """ return response, metadata + def pre_update_section( + self, + request: gc_section.UpdateSectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gc_section.UpdateSectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_section + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_update_section(self, response: gc_section.Section) -> gc_section.Section: + """Post-rpc interceptor for update_section + + DEPRECATED. Please use the `post_update_section_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_update_section` interceptor runs + before the `post_update_section_with_metadata` interceptor. + """ + return response + + def post_update_section_with_metadata( + self, + response: gc_section.Section, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_section.Section, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_section + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_update_section_with_metadata` + interceptor in new development instead of the `post_update_section` interceptor. + When both interceptors are used, this `post_update_section_with_metadata` interceptor runs after the + `post_update_section` interceptor. The (possibly modified) response returned by + `post_update_section` will be passed to + `post_update_section_with_metadata`. 
+ """ + return response, metadata + def pre_update_space( self, request: gc_space.UpdateSpaceRequest, @@ -2748,11 +3098,11 @@ def __call__( ) return resp - class _CreateSpace( - _BaseChatServiceRestTransport._BaseCreateSpace, ChatServiceRestStub + class _CreateSection( + _BaseChatServiceRestTransport._BaseCreateSection, ChatServiceRestStub ): def __hash__(self): - return hash("ChatServiceRestTransport.CreateSpace") + return hash("ChatServiceRestTransport.CreateSection") @staticmethod def _get_response( @@ -2779,18 +3129,18 @@ def _get_response( def __call__( self, - request: gc_space.CreateSpaceRequest, + request: gc_section.CreateSectionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gc_space.Space: - r"""Call the create space method over HTTP. + ) -> gc_section.Section: + r"""Call the create section method over HTTP. Args: - request (~.gc_space.CreateSpaceRequest): - The request object. A request to create a named space - with no members. + request (~.gc_section.CreateSectionRequest): + The request object. Request message for creating a + section. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2800,34 +3150,48 @@ def __call__( be of type `bytes`. Returns: - ~.gc_space.Space: - A space in Google Chat. Spaces are - conversations between two or more users - or 1:1 messages between a user and a - Chat app. + ~.gc_section.Section: + Represents a + `section `__ + in Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. **System Sections:** These are predefined sections + managed by Google Chat. Their resource names are + fixed, and they cannot be created, deleted, or have + their ``display_name`` modified. 
Examples include: + + - ``users/{user}/sections/default-direct-messages`` + - ``users/{user}/sections/default-spaces`` + - ``users/{user}/sections/default-apps`` + + 2. **Custom Sections:** These are sections created and + managed by the user. Creating a custom section using + ``CreateSection`` **requires** a ``display_name``. + Custom sections can be updated using + ``UpdateSection`` and deleted using + ``DeleteSection``. """ http_options = ( - _BaseChatServiceRestTransport._BaseCreateSpace._get_http_options() + _BaseChatServiceRestTransport._BaseCreateSection._get_http_options() ) - request, metadata = self._interceptor.pre_create_space(request, metadata) - transcoded_request = ( - _BaseChatServiceRestTransport._BaseCreateSpace._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_create_section(request, metadata) + transcoded_request = _BaseChatServiceRestTransport._BaseCreateSection._get_transcoded_request( + http_options, request ) body = ( - _BaseChatServiceRestTransport._BaseCreateSpace._get_request_body_json( + _BaseChatServiceRestTransport._BaseCreateSection._get_request_body_json( transcoded_request ) ) # Jsonify the query params query_params = ( - _BaseChatServiceRestTransport._BaseCreateSpace._get_query_params_json( + _BaseChatServiceRestTransport._BaseCreateSection._get_query_params_json( transcoded_request ) ) @@ -2850,17 +3214,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.chat_v1.ChatServiceClient.CreateSpace", + f"Sending request for google.chat_v1.ChatServiceClient.CreateSection", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "CreateSpace", + "rpcName": "CreateSection", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ChatServiceRestTransport._CreateSpace._get_response( + response = ChatServiceRestTransport._CreateSection._get_response( self._host, metadata, query_params, @@ -2876,21 
+3240,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = gc_space.Space() - pb_resp = gc_space.Space.pb(resp) + resp = gc_section.Section() + pb_resp = gc_section.Section.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_space(resp) + resp = self._interceptor.post_create_section(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_space_with_metadata( + resp, _ = self._interceptor.post_create_section_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = gc_space.Space.to_json(response) + response_payload = gc_section.Section.to_json(response) except: response_payload = None http_response = { @@ -2899,21 +3263,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.chat_v1.ChatServiceClient.create_space", + "Received response for google.chat_v1.ChatServiceClient.create_section", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "CreateSpace", + "rpcName": "CreateSection", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteCustomEmoji( - _BaseChatServiceRestTransport._BaseDeleteCustomEmoji, ChatServiceRestStub + class _CreateSpace( + _BaseChatServiceRestTransport._BaseCreateSpace, ChatServiceRestStub ): def __hash__(self): - return hash("ChatServiceRestTransport.DeleteCustomEmoji") + return hash("ChatServiceRestTransport.CreateSpace") @staticmethod def _get_response( @@ -2934,18 +3298,179 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: reaction.DeleteCustomEmojiRequest, + request: gc_space.CreateSpaceRequest, *, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete custom emoji method over HTTP. + ) -> gc_space.Space: + r"""Call the create space method over HTTP. + + Args: + request (~.gc_space.CreateSpaceRequest): + The request object. A request to create a named space + with no members. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gc_space.Space: + A space in Google Chat. Spaces are + conversations between two or more users + or 1:1 messages between a user and a + Chat app. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseCreateSpace._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_space(request, metadata) + transcoded_request = ( + _BaseChatServiceRestTransport._BaseCreateSpace._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseChatServiceRestTransport._BaseCreateSpace._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseChatServiceRestTransport._BaseCreateSpace._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + 
_LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.CreateSpace", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "CreateSpace", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._CreateSpace._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gc_space.Space() + pb_resp = gc_space.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_space_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gc_space.Space.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.create_space", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "CreateSpace", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteCustomEmoji( + _BaseChatServiceRestTransport._BaseDeleteCustomEmoji, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.DeleteCustomEmoji") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: reaction.DeleteCustomEmojiRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete custom emoji method over HTTP. Args: request (~.reaction.DeleteCustomEmojiRequest): @@ -3382,6 +3907,115 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _DeleteSection( + _BaseChatServiceRestTransport._BaseDeleteSection, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.DeleteSection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: section.DeleteSectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete section method over HTTP. + + Args: + request (~.section.DeleteSectionRequest): + The request object. Request message for deleting a section. `Developer + Preview `__. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseDeleteSection._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_section(request, metadata) + transcoded_request = _BaseChatServiceRestTransport._BaseDeleteSection._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseChatServiceRestTransport._BaseDeleteSection._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.DeleteSection", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "DeleteSection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._DeleteSection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _DeleteSpace( _BaseChatServiceRestTransport._BaseDeleteSpace, ChatServiceRestStub ): @@ -5594,8 +6228,309 @@ def __call__( ) return resp - class _ListSpaceEvents( - _BaseChatServiceRestTransport._BaseListSpaceEvents, ChatServiceRestStub + class _ListSectionItems( + _BaseChatServiceRestTransport._BaseListSectionItems, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.ListSectionItems") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: section.ListSectionItemsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.ListSectionItemsResponse: + r"""Call the list section items method over HTTP. + + Args: + request (~.section.ListSectionItemsRequest): + The request object. Request message for listing section + items. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.section.ListSectionItemsResponse: + Response message for listing section + items. 
+ + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseListSectionItems._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_section_items( + request, metadata + ) + transcoded_request = _BaseChatServiceRestTransport._BaseListSectionItems._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseChatServiceRestTransport._BaseListSectionItems._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.ListSectionItems", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSectionItems", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._ListSectionItems._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = section.ListSectionItemsResponse() + pb_resp = section.ListSectionItemsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_section_items(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_section_items_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = section.ListSectionItemsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.list_section_items", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSectionItems", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSections( + _BaseChatServiceRestTransport._BaseListSections, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.ListSections") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: section.ListSectionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> section.ListSectionsResponse: + r"""Call the list sections method over HTTP. + + Args: + request (~.section.ListSectionsRequest): + The request object. Request message for listing sections. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.section.ListSectionsResponse: + Response message for listing + sections. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseListSections._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_sections(request, metadata) + transcoded_request = ( + _BaseChatServiceRestTransport._BaseListSections._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseChatServiceRestTransport._BaseListSections._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.ListSections", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSections", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
ChatServiceRestTransport._ListSections._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = section.ListSectionsResponse() + pb_resp = section.ListSectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sections_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = section.ListSectionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.list_sections", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSections", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSpaceEvents( + _BaseChatServiceRestTransport._BaseListSpaceEvents, ChatServiceRestStub ): def __hash__(self): return hash("ChatServiceRestTransport.ListSpaceEvents") @@ -5624,18 +6559,321 @@ def _get_response( def __call__( self, - request: space_event.ListSpaceEventsRequest, + request: space_event.ListSpaceEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> space_event.ListSpaceEventsResponse: + r"""Call the list space events method over HTTP. 
+ + Args: + request (~.space_event.ListSpaceEventsRequest): + The request object. Request message for listing space + events. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.space_event.ListSpaceEventsResponse: + Response message for listing space + events. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseListSpaceEvents._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_space_events( + request, metadata + ) + transcoded_request = _BaseChatServiceRestTransport._BaseListSpaceEvents._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseChatServiceRestTransport._BaseListSpaceEvents._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.ListSpaceEvents", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSpaceEvents", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._ListSpaceEvents._get_response( + self._host, + metadata, + query_params, + self._session, + 
timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space_event.ListSpaceEventsResponse() + pb_resp = space_event.ListSpaceEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_space_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_space_events_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = space_event.ListSpaceEventsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.list_space_events", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSpaceEvents", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSpaces( + _BaseChatServiceRestTransport._BaseListSpaces, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.ListSpaces") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
space.ListSpacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> space.ListSpacesResponse: + r"""Call the list spaces method over HTTP. + + Args: + request (~.space.ListSpacesRequest): + The request object. A request to list the spaces the + caller is a member of. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.space.ListSpacesResponse: + The response for a list spaces + request. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseListSpaces._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_spaces(request, metadata) + transcoded_request = ( + _BaseChatServiceRestTransport._BaseListSpaces._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseChatServiceRestTransport._BaseListSpaces._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.ListSpaces", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSpaces", + "httpRequest": 
http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._ListSpaces._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.ListSpacesResponse() + pb_resp = space.ListSpacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_spaces(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_spaces_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = space.ListSpacesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.list_spaces", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "ListSpaces", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _MoveSectionItem( + _BaseChatServiceRestTransport._BaseMoveSectionItem, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.MoveSectionItem") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: section.MoveSectionItemRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> space_event.ListSpaceEventsResponse: - r"""Call the list space events method over HTTP. + ) -> section.MoveSectionItemResponse: + r"""Call the move section item method over HTTP. Args: - request (~.space_event.ListSpaceEventsRequest): - The request object. Request message for listing space - events. + request (~.section.MoveSectionItemRequest): + The request object. Request message for moving a section + item across sections. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5645,25 +6883,29 @@ def __call__( be of type `bytes`. Returns: - ~.space_event.ListSpaceEventsResponse: - Response message for listing space - events. + ~.section.MoveSectionItemResponse: + Response message for moving a section + item. 
""" http_options = ( - _BaseChatServiceRestTransport._BaseListSpaceEvents._get_http_options() + _BaseChatServiceRestTransport._BaseMoveSectionItem._get_http_options() ) - request, metadata = self._interceptor.pre_list_space_events( + request, metadata = self._interceptor.pre_move_section_item( request, metadata ) - transcoded_request = _BaseChatServiceRestTransport._BaseListSpaceEvents._get_transcoded_request( + transcoded_request = _BaseChatServiceRestTransport._BaseMoveSectionItem._get_transcoded_request( http_options, request ) + body = _BaseChatServiceRestTransport._BaseMoveSectionItem._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseChatServiceRestTransport._BaseListSpaceEvents._get_query_params_json( + query_params = _BaseChatServiceRestTransport._BaseMoveSectionItem._get_query_params_json( transcoded_request ) @@ -5685,23 +6927,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.chat_v1.ChatServiceClient.ListSpaceEvents", + f"Sending request for google.chat_v1.ChatServiceClient.MoveSectionItem", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "ListSpaceEvents", + "rpcName": "MoveSectionItem", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ChatServiceRestTransport._ListSpaceEvents._get_response( + response = ChatServiceRestTransport._MoveSectionItem._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -5710,23 +6953,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = space_event.ListSpaceEventsResponse() - pb_resp = space_event.ListSpaceEventsResponse.pb(resp) + resp = section.MoveSectionItemResponse() + pb_resp = section.MoveSectionItemResponse.pb(resp) json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) - resp = self._interceptor.post_list_space_events(resp) + resp = self._interceptor.post_move_section_item(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_space_events_with_metadata( + resp, _ = self._interceptor.post_move_section_item_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = space_event.ListSpaceEventsResponse.to_json( - response - ) + response_payload = section.MoveSectionItemResponse.to_json(response) except: response_payload = None http_response = { @@ -5735,21 +6976,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.chat_v1.ChatServiceClient.list_space_events", + "Received response for google.chat_v1.ChatServiceClient.move_section_item", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "ListSpaceEvents", + "rpcName": "MoveSectionItem", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListSpaces( - _BaseChatServiceRestTransport._BaseListSpaces, ChatServiceRestStub + class _PositionSection( + _BaseChatServiceRestTransport._BasePositionSection, ChatServiceRestStub ): def __hash__(self): - return hash("ChatServiceRestTransport.ListSpaces") + return hash("ChatServiceRestTransport.PositionSection") @staticmethod def _get_response( @@ -5770,23 +7011,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: space.ListSpacesRequest, + request: section.PositionSectionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> space.ListSpacesResponse: - r"""Call the list spaces method over HTTP. 
+ ) -> section.PositionSectionResponse: + r"""Call the position section method over HTTP. Args: - request (~.space.ListSpacesRequest): - The request object. A request to list the spaces the - caller is a member of. + request (~.section.PositionSectionRequest): + The request object. Request message for positioning a + section. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5796,28 +7038,30 @@ def __call__( be of type `bytes`. Returns: - ~.space.ListSpacesResponse: - The response for a list spaces - request. + ~.section.PositionSectionResponse: + Response message for positioning a + section. """ http_options = ( - _BaseChatServiceRestTransport._BaseListSpaces._get_http_options() + _BaseChatServiceRestTransport._BasePositionSection._get_http_options() ) - request, metadata = self._interceptor.pre_list_spaces(request, metadata) - transcoded_request = ( - _BaseChatServiceRestTransport._BaseListSpaces._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_position_section( + request, metadata + ) + transcoded_request = _BaseChatServiceRestTransport._BasePositionSection._get_transcoded_request( + http_options, request + ) + + body = _BaseChatServiceRestTransport._BasePositionSection._get_request_body_json( + transcoded_request ) # Jsonify the query params - query_params = ( - _BaseChatServiceRestTransport._BaseListSpaces._get_query_params_json( - transcoded_request - ) + query_params = _BaseChatServiceRestTransport._BasePositionSection._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5838,23 +7082,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.chat_v1.ChatServiceClient.ListSpaces", + f"Sending request for google.chat_v1.ChatServiceClient.PositionSection", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "ListSpaces", 
+ "rpcName": "PositionSection", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ChatServiceRestTransport._ListSpaces._get_response( + response = ChatServiceRestTransport._PositionSection._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -5863,21 +7108,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = space.ListSpacesResponse() - pb_resp = space.ListSpacesResponse.pb(resp) + resp = section.PositionSectionResponse() + pb_resp = section.PositionSectionResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_spaces(resp) + resp = self._interceptor.post_position_section(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_spaces_with_metadata( + resp, _ = self._interceptor.post_position_section_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = space.ListSpacesResponse.to_json(response) + response_payload = section.PositionSectionResponse.to_json(response) except: response_payload = None http_response = { @@ -5886,10 +7131,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.chat_v1.ChatServiceClient.list_spaces", + "Received response for google.chat_v1.ChatServiceClient.position_section", extra={ "serviceName": "google.chat.v1.ChatService", - "rpcName": "ListSpaces", + "rpcName": "PositionSection", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -6518,6 +7763,181 @@ def __call__( ) return resp + class _UpdateSection( + _BaseChatServiceRestTransport._BaseUpdateSection, ChatServiceRestStub + ): + def 
__hash__(self): + return hash("ChatServiceRestTransport.UpdateSection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gc_section.UpdateSectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gc_section.Section: + r"""Call the update section method over HTTP. + + Args: + request (~.gc_section.UpdateSectionRequest): + The request object. Request message for updating a + section. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gc_section.Section: + Represents a + `section `__ + in Google Chat. Sections help users organize their + spaces. There are two types of sections: + + 1. **System Sections:** These are predefined sections + managed by Google Chat. Their resource names are + fixed, and they cannot be created, deleted, or have + their ``display_name`` modified. Examples include: + + - ``users/{user}/sections/default-direct-messages`` + - ``users/{user}/sections/default-spaces`` + - ``users/{user}/sections/default-apps`` + + 2. 
**Custom Sections:** These are sections created and + managed by the user. Creating a custom section using + ``CreateSection`` **requires** a ``display_name``. + Custom sections can be updated using + ``UpdateSection`` and deleted using + ``DeleteSection``. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseUpdateSection._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_section(request, metadata) + transcoded_request = _BaseChatServiceRestTransport._BaseUpdateSection._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseChatServiceRestTransport._BaseUpdateSection._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseChatServiceRestTransport._BaseUpdateSection._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.UpdateSection", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "UpdateSection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._UpdateSection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gc_section.Section() + pb_resp = gc_section.Section.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_section(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_section_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gc_section.Section.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.update_section", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "UpdateSection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _UpdateSpace( _BaseChatServiceRestTransport._BaseUpdateSpace, ChatServiceRestStub ): @@ -7195,6 +8615,14 @@ def create_reaction( # In C++ this would require a dynamic_cast return self._CreateReaction(self._session, self._host, self._interceptor) # type: ignore + @property + def create_section( + self, + ) -> Callable[[gc_section.CreateSectionRequest], gc_section.Section]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSection(self._session, self._host, self._interceptor) # type: ignore + @property def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
@@ -7233,6 +8661,14 @@ def delete_reaction( # In C++ this would require a dynamic_cast return self._DeleteReaction(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_section( + self, + ) -> Callable[[section.DeleteSectionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSection(self._session, self._host, self._interceptor) # type: ignore + @property def delete_space(self) -> Callable[[space.DeleteSpaceRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -7360,6 +8796,22 @@ def list_reactions( # In C++ this would require a dynamic_cast return self._ListReactions(self._session, self._host, self._interceptor) # type: ignore + @property + def list_section_items( + self, + ) -> Callable[[section.ListSectionItemsRequest], section.ListSectionItemsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSectionItems(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sections( + self, + ) -> Callable[[section.ListSectionsRequest], section.ListSectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSections(self._session, self._host, self._interceptor) # type: ignore + @property def list_space_events( self, @@ -7378,6 +8830,22 @@ def list_spaces( # In C++ this would require a dynamic_cast return self._ListSpaces(self._session, self._host, self._interceptor) # type: ignore + @property + def move_section_item( + self, + ) -> Callable[[section.MoveSectionItemRequest], section.MoveSectionItemResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MoveSectionItem(self._session, self._host, self._interceptor) # type: ignore + + @property + def position_section( + self, + ) -> Callable[[section.PositionSectionRequest], section.PositionSectionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PositionSection(self._session, self._host, self._interceptor) # type: ignore + @property def search_spaces( self, @@ -7408,6 +8876,14 @@ def update_message( # In C++ this would require a dynamic_cast return self._UpdateMessage(self._session, self._host, self._interceptor) # type: ignore + @property + def update_section( + self, + ) -> Callable[[gc_section.UpdateSectionRequest], gc_section.Section]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSection(self._session, self._host, self._interceptor) # type: ignore + @property def update_space(self) -> Callable[[gc_space.UpdateSpaceRequest], gc_space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py index 096dc810d09c..683a9f1fe395 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py @@ -26,6 +26,7 @@ membership, message, reaction, + section, space, space_event, space_notification_setting, @@ -36,6 +37,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -392,6 +394,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateSection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=users/*}/sections", + "body": "section", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gc_section.CreateSectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request 
body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseCreateSection._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateSpace: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -637,6 +696,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteSection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=users/*/sections/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = section.DeleteSectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseDeleteSection._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteSpace: def 
__hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1333,6 +1439,100 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListSectionItems: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=users/*/sections/*}/items", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = section.ListSectionItemsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseListSectionItems._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSections: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=users/*}/sections", + }, + ] + return http_options + + @staticmethod 
+ def _get_transcoded_request(http_options, request): + pb_request = section.ListSectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseListSections._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListSpaceEvents: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1414,6 +1614,120 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseMoveSectionItem: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=users/*/sections/*/items/*}:move", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = section.MoveSectionItemRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( 
+ json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseMoveSectionItem._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePositionSection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=users/*/sections/*}:position", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = section.PositionSectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BasePositionSection._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseSearchSpaces: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1643,6 +1957,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + 
class _BaseUpdateSection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{section.name=users/*/sections/*}", + "body": "section", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gc_section.UpdateSectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseChatServiceRestTransport._BaseUpdateSection._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateSpace: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index 5d3e0c542903..c68178f3e922 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -114,6 +114,21 @@ ListReactionsResponse, Reaction, ) +from .section import ( + CreateSectionRequest, + 
DeleteSectionRequest, + ListSectionItemsRequest, + ListSectionItemsResponse, + ListSectionsRequest, + ListSectionsResponse, + MoveSectionItemRequest, + MoveSectionItemResponse, + PositionSectionRequest, + PositionSectionResponse, + Section, + SectionItem, + UpdateSectionRequest, +) from .slash_command import ( SlashCommand, ) @@ -239,6 +254,19 @@ "ListReactionsRequest", "ListReactionsResponse", "Reaction", + "CreateSectionRequest", + "DeleteSectionRequest", + "ListSectionItemsRequest", + "ListSectionItemsResponse", + "ListSectionsRequest", + "ListSectionsResponse", + "MoveSectionItemRequest", + "MoveSectionItemResponse", + "PositionSectionRequest", + "PositionSectionResponse", + "Section", + "SectionItem", + "UpdateSectionRequest", "SlashCommand", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/section.py b/packages/google-apps-chat/google/apps/chat_v1/types/section.py new file mode 100644 index 000000000000..37f790cdfbb5 --- /dev/null +++ b/packages/google-apps-chat/google/apps/chat_v1/types/section.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.chat.v1", + manifest={ + "Section", + "SectionItem", + "CreateSectionRequest", + "DeleteSectionRequest", + "UpdateSectionRequest", + "ListSectionsRequest", + "ListSectionsResponse", + "PositionSectionRequest", + "PositionSectionResponse", + "ListSectionItemsRequest", + "ListSectionItemsResponse", + "MoveSectionItemRequest", + "MoveSectionItemResponse", + }, +) + + +class Section(proto.Message): + r"""Represents a + `section `__ in + Google Chat. Sections help users organize their spaces. There are + two types of sections: + + 1. **System Sections:** These are predefined sections managed by + Google Chat. Their resource names are fixed, and they cannot be + created, deleted, or have their ``display_name`` modified. + Examples include: + + - ``users/{user}/sections/default-direct-messages`` + - ``users/{user}/sections/default-spaces`` + - ``users/{user}/sections/default-apps`` + + 2. **Custom Sections:** These are sections created and managed by + the user. Creating a custom section using ``CreateSection`` + **requires** a ``display_name``. Custom sections can be updated + using ``UpdateSection`` and deleted using ``DeleteSection``. + + Attributes: + name (str): + Identifier. Resource name of the section. + + For system sections, the section ID is a constant string: + + - DEFAULT_DIRECT_MESSAGES: + ``users/{user}/sections/default-direct-messages`` + - DEFAULT_SPACES: ``users/{user}/sections/default-spaces`` + - DEFAULT_APPS: ``users/{user}/sections/default-apps`` + + Format: ``users/{user}/sections/{section}`` + display_name (str): + Optional. The section's display name. Only populated for + sections of type ``CUSTOM_SECTION``. Supports up to 80 + characters. Required when creating a ``CUSTOM_SECTION``. 
+ sort_order (int): + Output only. The order of the section in relation to other + sections. Sections with a lower ``sort_order`` value appear + before sections with a higher value. + type_ (google.apps.chat_v1.types.Section.SectionType): + Required. The type of the section. + """ + + class SectionType(proto.Enum): + r"""Section types. + + Values: + SECTION_TYPE_UNSPECIFIED (0): + Unspecified section type. + CUSTOM_SECTION (1): + Custom section. + DEFAULT_DIRECT_MESSAGES (2): + Default section containing + `DIRECT_MESSAGE `__ + between two human users or + `GROUP_CHAT `__ + spaces that don't belong to any custom section. + DEFAULT_SPACES (3): + Default spaces that don't belong to any + custom section. + DEFAULT_APPS (6): + Default section containing a user's installed + apps. + """ + + SECTION_TYPE_UNSPECIFIED = 0 + CUSTOM_SECTION = 1 + DEFAULT_DIRECT_MESSAGES = 2 + DEFAULT_SPACES = 3 + DEFAULT_APPS = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + sort_order: int = proto.Field( + proto.INT32, + number=3, + ) + type_: SectionType = proto.Field( + proto.ENUM, + number=4, + enum=SectionType, + ) + + +class SectionItem(proto.Message): + r"""A user's defined section item. This is used to represent + section items, such as spaces, grouped under a section. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the section item. + + Format: ``users/{user}/sections/{section}/items/{item}`` + space (str): + Optional. The space resource name. + + Format: ``spaces/{space}`` + + This field is a member of `oneof`_ ``item``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + space: str = proto.Field( + proto.STRING, + number=2, + oneof="item", + ) + + +class CreateSectionRequest(proto.Message): + r"""Request message for creating a section. 
+ + Attributes: + parent (str): + Required. The parent resource name where the section is + created. + + Format: ``users/{user}`` + section (google.apps.chat_v1.types.Section): + Required. The section to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + section: "Section" = proto.Field( + proto.MESSAGE, + number=2, + message="Section", + ) + + +class DeleteSectionRequest(proto.Message): + r"""Request message for deleting a section. `Developer + Preview `__. + + Attributes: + name (str): + Required. The name of the section to delete. + + Format: ``users/{user}/sections/{section}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSectionRequest(proto.Message): + r"""Request message for updating a section. + + Attributes: + section (google.apps.chat_v1.types.Section): + Required. The section to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The mask to specify which fields to update. + + Currently supported field paths: + + - ``display_name`` + """ + + section: "Section" = proto.Field( + proto.MESSAGE, + number=1, + message="Section", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListSectionsRequest(proto.Message): + r"""Request message for listing sections. + + Attributes: + parent (str): + Required. The parent, which is the user resource name that + owns this collection of sections. Only supports listing + sections for the calling user. To refer to the calling user, + set one of the following: + + - The ``me`` alias. For example, ``users/me``. + + - Their Workspace email address. For example, + ``users/user@example.com``. + + - Their user id. For example, ``users/123456789``. + + Format: ``users/{user}`` + page_size (int): + Optional. The maximum number of sections to return. The + service may return fewer than this value. + + If unspecified, at most 10 sections will be returned. 
+ + The maximum value is 100. If you use a value more than 100, + it's automatically changed to 100. + + Negative values return an ``INVALID_ARGUMENT`` error. + page_token (str): + Optional. A page token, received from a + previous list sections call. Provide this to + retrieve the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSectionsResponse(proto.Message): + r"""Response message for listing sections. + + Attributes: + sections (MutableSequence[google.apps.chat_v1.types.Section]): + The sections from the specified user. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + sections: MutableSequence["Section"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Section", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class PositionSectionRequest(proto.Message): + r"""Request message for positioning a section. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the section to position. + + Format: ``users/{user}/sections/{section}`` + sort_order (int): + Optional. The absolute position of the + section in the list of sections. 
The position + must be greater than 0. If the position is + greater than the number of sections, the section + will be appended to the end of the list. This + operation inserts the section at the given + position and shifts the original section at that + position, and those below it, to the next + position. + + This field is a member of `oneof`_ ``position``. + relative_position (google.apps.chat_v1.types.PositionSectionRequest.Position): + Optional. The relative position of the + section in the list of sections. + + This field is a member of `oneof`_ ``position``. + """ + + class Position(proto.Enum): + r"""The position of the section. + + Values: + POSITION_UNSPECIFIED (0): + Unspecified position. + START (1): + Start of the list of sections. + END (2): + End of the list of sections. + """ + + POSITION_UNSPECIFIED = 0 + START = 1 + END = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + sort_order: int = proto.Field( + proto.INT32, + number=2, + oneof="position", + ) + relative_position: Position = proto.Field( + proto.ENUM, + number=3, + oneof="position", + enum=Position, + ) + + +class PositionSectionResponse(proto.Message): + r"""Response message for positioning a section. + + Attributes: + section (google.apps.chat_v1.types.Section): + The updated section. + """ + + section: "Section" = proto.Field( + proto.MESSAGE, + number=1, + message="Section", + ) + + +class ListSectionItemsRequest(proto.Message): + r"""Request message for listing section items. + + Attributes: + parent (str): + Required. The parent, which is the section resource name + that owns this collection of section items. Only supports + listing section items for the calling user. + + When you're filtering by space, use the wildcard ``-`` to + search across all sections. For example, + ``users/{user}/sections/-``. + + Format: ``users/{user}/sections/{section}`` + page_size (int): + Optional. The maximum number of section items to return. 
The + service may return fewer than this value. + + If unspecified, at most 10 section items will be returned. + + The maximum value is 100. If you use a value more than 100, + it's automatically changed to 100. + + Negative values return an ``INVALID_ARGUMENT`` error. + page_token (str): + Optional. A page token, received from a + previous list section items call. Provide this + to retrieve the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + filter (str): + Optional. A query filter. + + Currently only supports filtering by space. + + For example, ``space = spaces/{space}``. + + Invalid queries are rejected with an ``INVALID_ARGUMENT`` + error. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSectionItemsResponse(proto.Message): + r"""Response message for listing section items. + + Attributes: + section_items (MutableSequence[google.apps.chat_v1.types.SectionItem]): + The section items from the specified section. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + section_items: MutableSequence["SectionItem"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SectionItem", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MoveSectionItemRequest(proto.Message): + r"""Request message for moving a section item across sections. + + Attributes: + name (str): + Required. The resource name of the section item to move. 
+ + Format: ``users/{user}/sections/{section}/items/{item}`` + target_section (str): + Required. The resource name of the section to move the + section item to. + + Format: ``users/{user}/sections/{section}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + target_section: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MoveSectionItemResponse(proto.Message): + r"""Response message for moving a section item. + + Attributes: + section_item (google.apps.chat_v1.types.SectionItem): + The updated section item. + """ + + section_item: "SectionItem" = proto.Field( + proto.MESSAGE, + number=1, + message="SectionItem", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_async.py new file mode 100644 index 000000000000..2bc63ec45363 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_CreateSection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_create_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.CreateSectionRequest( + parent="parent_value", + section=section, + ) + + # Make the request + response = await client.create_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_CreateSection_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_sync.py new file mode 100644 index 000000000000..92f67a4c69ac --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_section_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_CreateSection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_create_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.CreateSectionRequest( + parent="parent_value", + section=section, + ) + + # Make the request + response = client.create_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_CreateSection_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_async.py new file mode 100644 index 000000000000..d9df29b7bb49 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_DeleteSection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_delete_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.DeleteSectionRequest( + name="name_value", + ) + + # Make the request + await client.delete_section(request=request) + + +# [END chat_v1_generated_ChatService_DeleteSection_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_sync.py new file mode 100644 index 000000000000..3b7748743494 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_delete_section_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_DeleteSection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_delete_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.DeleteSectionRequest( + name="name_value", + ) + + # Make the request + client.delete_section(request=request) + + +# [END chat_v1_generated_ChatService_DeleteSection_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_async.py new file mode 100644 index 000000000000..632f2f5019dc --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSectionItems +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_ListSectionItems_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_list_section_items(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionItemsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_section_items(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_ListSectionItems_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_sync.py new file mode 100644 index 000000000000..a6519c8b7c40 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_section_items_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSectionItems +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_ListSectionItems_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_list_section_items(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionItemsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_section_items(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_ListSectionItems_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_async.py new file mode 100644 index 000000000000..0c437bec81f2 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_ListSections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_list_sections(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_ListSections_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_sync.py new file mode 100644 index 000000000000..88df3a5f0719 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_list_sections_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_ListSections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_list_sections(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.ListSectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sections(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_ListSections_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_async.py new file mode 100644 index 000000000000..951988ca6628 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveSectionItem +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_MoveSectionItem_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_move_section_item(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + # Make the request + response = await client.move_section_item(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_MoveSectionItem_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_sync.py new file mode 100644 index 000000000000..558363020b2a --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_move_section_item_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveSectionItem +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_MoveSectionItem_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_move_section_item(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + # Make the request + response = client.move_section_item(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_MoveSectionItem_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_async.py new file mode 100644 index 000000000000..1d9dfb1dcdec --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PositionSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_PositionSection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_position_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.PositionSectionRequest( + sort_order=1091, + name="name_value", + ) + + # Make the request + response = await client.position_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_PositionSection_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_sync.py new file mode 100644 index 000000000000..71efade6b5d7 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_position_section_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PositionSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_PositionSection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_position_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.PositionSectionRequest( + sort_order=1091, + name="name_value", + ) + + # Make the request + response = client.position_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_PositionSection_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_async.py new file mode 100644 index 000000000000..2f362ad74996 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_UpdateSection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_update_section(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.UpdateSectionRequest( + section=section, + ) + + # Make the request + response = await client.update_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_UpdateSection_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_sync.py new file mode 100644 index 000000000000..075c86c341e5 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_section_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_UpdateSection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_update_section(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + section = chat_v1.Section() + section.type_ = "DEFAULT_APPS" + + request = chat_v1.UpdateSectionRequest( + section=section, + ) + + # Make the request + response = client.update_section(request=request) + + # Handle the response + print(response) + + +# [END chat_v1_generated_ChatService_UpdateSection_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index b22c6e22ce99..ecff63aa6c50 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -840,6 +840,175 @@ ], "title": "chat_v1_generated_chat_service_create_reaction_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.create_section", + "method": { + "fullName": "google.chat.v1.ChatService.CreateSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "CreateSection" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.CreateSectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "section", + "type": "google.apps.chat_v1.types.Section" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.Section", + "shortName": "create_section" + }, + "description": "Sample for CreateSection", + "file": "chat_v1_generated_chat_service_create_section_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_CreateSection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_create_section_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.create_section", + "method": { + "fullName": "google.chat.v1.ChatService.CreateSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "CreateSection" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.CreateSectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "section", + "type": "google.apps.chat_v1.types.Section" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.Section", + "shortName": "create_section" + }, + "description": "Sample for CreateSection", + "file": "chat_v1_generated_chat_service_create_section_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"chat_v1_generated_ChatService_CreateSection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_create_section_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1635,19 +1804,19 @@ "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", "shortName": "ChatServiceAsyncClient" }, - "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.delete_space", + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.delete_section", "method": { - "fullName": "google.chat.v1.ChatService.DeleteSpace", + "fullName": "google.chat.v1.ChatService.DeleteSection", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "DeleteSpace" + "shortName": "DeleteSection" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.DeleteSpaceRequest" + "type": "google.apps.chat_v1.types.DeleteSectionRequest" }, { "name": "name", @@ -1666,13 +1835,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_space" + "shortName": "delete_section" }, - "description": "Sample for DeleteSpace", - "file": "chat_v1_generated_chat_service_delete_space_async.py", + "description": "Sample for DeleteSection", + "file": "chat_v1_generated_chat_service_delete_section_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_DeleteSpace_async", + "regionTag": "chat_v1_generated_ChatService_DeleteSection_async", "segments": [ { "end": 49, @@ -1703,7 +1872,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_delete_space_async.py" + "title": 
"chat_v1_generated_chat_service_delete_section_async.py" }, { "canonical": true, @@ -1712,19 +1881,19 @@ "fullName": "google.apps.chat_v1.ChatServiceClient", "shortName": "ChatServiceClient" }, - "fullName": "google.apps.chat_v1.ChatServiceClient.delete_space", + "fullName": "google.apps.chat_v1.ChatServiceClient.delete_section", "method": { - "fullName": "google.chat.v1.ChatService.DeleteSpace", + "fullName": "google.chat.v1.ChatService.DeleteSection", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "DeleteSpace" + "shortName": "DeleteSection" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.DeleteSpaceRequest" + "type": "google.apps.chat_v1.types.DeleteSectionRequest" }, { "name": "name", @@ -1743,13 +1912,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_space" + "shortName": "delete_section" }, - "description": "Sample for DeleteSpace", - "file": "chat_v1_generated_chat_service_delete_space_sync.py", + "description": "Sample for DeleteSection", + "file": "chat_v1_generated_chat_service_delete_section_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_DeleteSpace_sync", + "regionTag": "chat_v1_generated_ChatService_DeleteSection_sync", "segments": [ { "end": 49, @@ -1780,7 +1949,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_delete_space_sync.py" + "title": "chat_v1_generated_chat_service_delete_section_sync.py" }, { "canonical": true, @@ -1790,19 +1959,23 @@ "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", "shortName": "ChatServiceAsyncClient" }, - "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.find_direct_message", + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.delete_space", "method": { - "fullName": "google.chat.v1.ChatService.FindDirectMessage", + "fullName": "google.chat.v1.ChatService.DeleteSpace", "service": { "fullName": 
"google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "FindDirectMessage" + "shortName": "DeleteSpace" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.FindDirectMessageRequest" + "type": "google.apps.chat_v1.types.DeleteSpaceRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1817,22 +1990,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.types.Space", - "shortName": "find_direct_message" + "shortName": "delete_space" }, - "description": "Sample for FindDirectMessage", - "file": "chat_v1_generated_chat_service_find_direct_message_async.py", + "description": "Sample for DeleteSpace", + "file": "chat_v1_generated_chat_service_delete_space_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_FindDirectMessage_async", + "regionTag": "chat_v1_generated_ChatService_DeleteSpace_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1847,17 +2019,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_find_direct_message_async.py" + "title": "chat_v1_generated_chat_service_delete_space_async.py" }, { "canonical": true, @@ -1866,19 +2036,23 @@ "fullName": "google.apps.chat_v1.ChatServiceClient", "shortName": "ChatServiceClient" }, - "fullName": "google.apps.chat_v1.ChatServiceClient.find_direct_message", + "fullName": "google.apps.chat_v1.ChatServiceClient.delete_space", "method": { - "fullName": "google.chat.v1.ChatService.FindDirectMessage", + "fullName": "google.chat.v1.ChatService.DeleteSpace", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "FindDirectMessage" + "shortName": "DeleteSpace" }, 
"parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.FindDirectMessageRequest" + "type": "google.apps.chat_v1.types.DeleteSpaceRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1893,22 +2067,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.types.Space", - "shortName": "find_direct_message" + "shortName": "delete_space" }, - "description": "Sample for FindDirectMessage", - "file": "chat_v1_generated_chat_service_find_direct_message_sync.py", + "description": "Sample for DeleteSpace", + "file": "chat_v1_generated_chat_service_delete_space_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_FindDirectMessage_sync", + "regionTag": "chat_v1_generated_ChatService_DeleteSpace_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1923,17 +2096,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_find_direct_message_sync.py" + "title": "chat_v1_generated_chat_service_delete_space_sync.py" }, { "canonical": true, @@ -1943,23 +2114,19 @@ "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", "shortName": "ChatServiceAsyncClient" }, - "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.get_attachment", + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.find_direct_message", "method": { - "fullName": "google.chat.v1.ChatService.GetAttachment", + "fullName": "google.chat.v1.ChatService.FindDirectMessage", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "GetAttachment" + "shortName": "FindDirectMessage" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.GetAttachmentRequest" - }, - { - 
"name": "name", - "type": "str" + "type": "google.apps.chat_v1.types.FindDirectMessageRequest" }, { "name": "retry", @@ -1974,14 +2141,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.types.Attachment", - "shortName": "get_attachment" + "resultType": "google.apps.chat_v1.types.Space", + "shortName": "find_direct_message" }, - "description": "Sample for GetAttachment", - "file": "chat_v1_generated_chat_service_get_attachment_async.py", + "description": "Sample for FindDirectMessage", + "file": "chat_v1_generated_chat_service_find_direct_message_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_GetAttachment_async", + "regionTag": "chat_v1_generated_ChatService_FindDirectMessage_async", "segments": [ { "end": 51, @@ -2014,7 +2181,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_get_attachment_async.py" + "title": "chat_v1_generated_chat_service_find_direct_message_async.py" }, { "canonical": true, @@ -2023,19 +2190,176 @@ "fullName": "google.apps.chat_v1.ChatServiceClient", "shortName": "ChatServiceClient" }, - "fullName": "google.apps.chat_v1.ChatServiceClient.get_attachment", + "fullName": "google.apps.chat_v1.ChatServiceClient.find_direct_message", "method": { - "fullName": "google.chat.v1.ChatService.GetAttachment", + "fullName": "google.chat.v1.ChatService.FindDirectMessage", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "GetAttachment" + "shortName": "FindDirectMessage" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.GetAttachmentRequest" + "type": "google.apps.chat_v1.types.FindDirectMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.apps.chat_v1.types.Space", + "shortName": "find_direct_message" + }, + "description": "Sample for FindDirectMessage", + "file": "chat_v1_generated_chat_service_find_direct_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_FindDirectMessage_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_find_direct_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.get_attachment", + "method": { + "fullName": "google.chat.v1.ChatService.GetAttachment", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "GetAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.GetAttachmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.Attachment", + "shortName": "get_attachment" + }, + "description": "Sample for GetAttachment", + "file": "chat_v1_generated_chat_service_get_attachment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_GetAttachment_async", + "segments": [ + { + "end": 51, + "start": 27, + 
"type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_get_attachment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.get_attachment", + "method": { + "fullName": "google.chat.v1.ChatService.GetAttachment", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "GetAttachment" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.GetAttachmentRequest" }, { "name": "name", @@ -4028,28 +4352,24 @@ "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", "shortName": "ChatServiceAsyncClient" }, - "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_space_events", + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_section_items", "method": { - "fullName": "google.chat.v1.ChatService.ListSpaceEvents", + "fullName": "google.chat.v1.ChatService.ListSectionItems", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "ListSpaceEvents" + "shortName": "ListSectionItems" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.ListSpaceEventsRequest" + "type": "google.apps.chat_v1.types.ListSectionItemsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "filter", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4063,22 +4383,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.apps.chat_v1.services.chat_service.pagers.ListSpaceEventsAsyncPager", - "shortName": "list_space_events" + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSectionItemsAsyncPager", + "shortName": "list_section_items" }, - "description": "Sample for ListSpaceEvents", - "file": "chat_v1_generated_chat_service_list_space_events_async.py", + "description": "Sample for ListSectionItems", + "file": "chat_v1_generated_chat_service_list_section_items_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_ListSpaceEvents_async", + "regionTag": "chat_v1_generated_ChatService_ListSectionItems_async", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4088,22 +4408,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_list_space_events_async.py" + "title": "chat_v1_generated_chat_service_list_section_items_async.py" }, { "canonical": true, @@ -4112,28 +4432,24 @@ "fullName": "google.apps.chat_v1.ChatServiceClient", "shortName": "ChatServiceClient" }, - "fullName": "google.apps.chat_v1.ChatServiceClient.list_space_events", + "fullName": "google.apps.chat_v1.ChatServiceClient.list_section_items", "method": { - "fullName": "google.chat.v1.ChatService.ListSpaceEvents", + "fullName": "google.chat.v1.ChatService.ListSectionItems", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "ListSpaceEvents" + "shortName": "ListSectionItems" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.ListSpaceEventsRequest" + "type": "google.apps.chat_v1.types.ListSectionItemsRequest" 
}, { "name": "parent", "type": "str" }, - { - "name": "filter", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -4147,22 +4463,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpaceEventsPager", - "shortName": "list_space_events" + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSectionItemsPager", + "shortName": "list_section_items" }, - "description": "Sample for ListSpaceEvents", - "file": "chat_v1_generated_chat_service_list_space_events_sync.py", + "description": "Sample for ListSectionItems", + "file": "chat_v1_generated_chat_service_list_section_items_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_ListSpaceEvents_sync", + "regionTag": "chat_v1_generated_ChatService_ListSectionItems_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4172,22 +4488,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_list_space_events_sync.py" + "title": "chat_v1_generated_chat_service_list_section_items_sync.py" }, { "canonical": true, @@ -4197,19 +4513,23 @@ "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", "shortName": "ChatServiceAsyncClient" }, - "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_spaces", + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_sections", "method": { - "fullName": "google.chat.v1.ChatService.ListSpaces", + "fullName": "google.chat.v1.ChatService.ListSections", "service": { "fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - 
"shortName": "ListSpaces" + "shortName": "ListSections" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.ListSpacesRequest" + "type": "google.apps.chat_v1.types.ListSectionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4224,22 +4544,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpacesAsyncPager", - "shortName": "list_spaces" + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSectionsAsyncPager", + "shortName": "list_sections" }, - "description": "Sample for ListSpaces", - "file": "chat_v1_generated_chat_service_list_spaces_async.py", + "description": "Sample for ListSections", + "file": "chat_v1_generated_chat_service_list_sections_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_ListSpaces_async", + "regionTag": "chat_v1_generated_ChatService_ListSections_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4249,22 +4569,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 48, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_list_spaces_async.py" + "title": "chat_v1_generated_chat_service_list_sections_async.py" }, { "canonical": true, @@ -4273,19 +4593,23 @@ "fullName": "google.apps.chat_v1.ChatServiceClient", "shortName": "ChatServiceClient" }, - "fullName": "google.apps.chat_v1.ChatServiceClient.list_spaces", + "fullName": "google.apps.chat_v1.ChatServiceClient.list_sections", "method": { - "fullName": "google.chat.v1.ChatService.ListSpaces", + "fullName": "google.chat.v1.ChatService.ListSections", "service": { 
"fullName": "google.chat.v1.ChatService", "shortName": "ChatService" }, - "shortName": "ListSpaces" + "shortName": "ListSections" }, "parameters": [ { "name": "request", - "type": "google.apps.chat_v1.types.ListSpacesRequest" + "type": "google.apps.chat_v1.types.ListSectionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -4300,22 +4624,590 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpacesPager", - "shortName": "list_spaces" + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSectionsPager", + "shortName": "list_sections" }, - "description": "Sample for ListSpaces", - "file": "chat_v1_generated_chat_service_list_spaces_sync.py", + "description": "Sample for ListSections", + "file": "chat_v1_generated_chat_service_list_sections_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "chat_v1_generated_ChatService_ListSpaces_sync", + "regionTag": "chat_v1_generated_ChatService_ListSections_sync", "segments": [ { - "end": 51, + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_list_sections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_space_events", + "method": { + "fullName": "google.chat.v1.ChatService.ListSpaceEvents", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": 
"ListSpaceEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.ListSpaceEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpaceEventsAsyncPager", + "shortName": "list_space_events" + }, + "description": "Sample for ListSpaceEvents", + "file": "chat_v1_generated_chat_service_list_space_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_ListSpaceEvents_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_list_space_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.list_space_events", + "method": { + "fullName": "google.chat.v1.ChatService.ListSpaceEvents", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "ListSpaceEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.ListSpaceEventsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpaceEventsPager", + "shortName": "list_space_events" + }, + "description": "Sample for ListSpaceEvents", + "file": "chat_v1_generated_chat_service_list_space_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_ListSpaceEvents_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_list_space_events_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.list_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.ListSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "ListSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.ListSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpacesAsyncPager", + "shortName": "list_spaces" + }, + "description": "Sample for ListSpaces", + "file": "chat_v1_generated_chat_service_list_spaces_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_ListSpaces_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_list_spaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.list_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.ListSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "ListSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.ListSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.ListSpacesPager", + "shortName": "list_spaces" + }, + "description": "Sample for ListSpaces", + "file": "chat_v1_generated_chat_service_list_spaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_ListSpaces_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_list_spaces_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.move_section_item", + "method": { + "fullName": "google.chat.v1.ChatService.MoveSectionItem", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "MoveSectionItem" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.MoveSectionItemRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "target_section", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.MoveSectionItemResponse", + "shortName": "move_section_item" + }, + "description": "Sample for MoveSectionItem", + "file": "chat_v1_generated_chat_service_move_section_item_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_MoveSectionItem_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_move_section_item_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": 
"google.apps.chat_v1.ChatServiceClient.move_section_item", + "method": { + "fullName": "google.chat.v1.ChatService.MoveSectionItem", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "MoveSectionItem" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.MoveSectionItemRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "target_section", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.MoveSectionItemResponse", + "shortName": "move_section_item" + }, + "description": "Sample for MoveSectionItem", + "file": "chat_v1_generated_chat_service_move_section_item_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_MoveSectionItem_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_move_section_item_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.position_section", + "method": { + "fullName": "google.chat.v1.ChatService.PositionSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "PositionSection" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.apps.chat_v1.types.PositionSectionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.PositionSectionResponse", + "shortName": "position_section" + }, + "description": "Sample for PositionSection", + "file": "chat_v1_generated_chat_service_position_section_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_PositionSection_async", + "segments": [ + { + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4325,22 +5217,98 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_position_section_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.position_section", + "method": { + "fullName": "google.chat.v1.ChatService.PositionSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "PositionSection" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.PositionSectionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.PositionSectionResponse", + "shortName": "position_section" + }, + "description": "Sample 
for PositionSection", + "file": "chat_v1_generated_chat_service_position_section_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_PositionSection_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, { "end": 52, - "start": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "chat_v1_generated_chat_service_list_spaces_sync.py" + "title": "chat_v1_generated_chat_service_position_section_sync.py" }, { "canonical": true, @@ -4986,6 +5954,175 @@ ], "title": "chat_v1_generated_chat_service_update_message_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.update_section", + "method": { + "fullName": "google.chat.v1.ChatService.UpdateSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "UpdateSection" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.UpdateSectionRequest" + }, + { + "name": "section", + "type": "google.apps.chat_v1.types.Section" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.Section", + "shortName": "update_section" + }, + "description": "Sample for UpdateSection", + "file": 
"chat_v1_generated_chat_service_update_section_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_UpdateSection_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_update_section_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.update_section", + "method": { + "fullName": "google.chat.v1.ChatService.UpdateSection", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "UpdateSection" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.UpdateSectionRequest" + }, + { + "name": "section", + "type": "google.apps.chat_v1.types.Section" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.types.Section", + "shortName": "update_section" + }, + "description": "Sample for UpdateSection", + "file": "chat_v1_generated_chat_service_update_section_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_UpdateSection_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" 
+ }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_update_section_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 7fc63f420f9b..95ed14043d74 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -82,6 +82,7 @@ membership, message, reaction, + section, slash_command, space, space_event, @@ -95,6 +96,7 @@ from google.apps.chat_v1.types import membership as gc_membership from google.apps.chat_v1.types import message as gc_message from google.apps.chat_v1.types import reaction as gc_reaction +from google.apps.chat_v1.types import section as gc_section from google.apps.chat_v1.types import space as gc_space from google.apps.chat_v1.types import ( space_notification_setting as gc_space_notification_setting, @@ -1308,9 +1310,11 @@ def test_chat_service_client_create_channel_credentials_file( "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -1330,6 
+1334,8 @@ def test_chat_service_client_create_channel_credentials_file( "https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ), scopes=None, @@ -13769,13 +13775,83 @@ async def test_update_space_notification_setting_flattened_error_async(): ) -def test_create_message_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + gc_section.CreateSectionRequest, + dict, + ], +) +def test_create_section(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, + ) + response = client.create_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gc_section.CreateSectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gc_section.Section) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION + + +def test_create_section_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gc_section.CreateSectionRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_section(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gc_section.CreateSectionRequest( + parent="parent_value", + ) + + +def test_create_section_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13783,204 +13859,331 @@ def test_create_message_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_message in client._transport._wrapped_methods + assert client._transport.create_section in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_message] = mock_rpc - + client._transport._wrapped_methods[client._transport.create_section] = mock_rpc request = {} - client.create_message(request) + client.create_section(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_message(request) + client.create_section(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_message_rest_required_fields( - request_type=gc_message.CreateMessageRequest, +@pytest.mark.asyncio +async def test_create_section_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ChatServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_message._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify required fields with default values are now present + # Ensure method has been cached + assert ( + client._client._transport.create_section + in client._client._transport._wrapped_methods + ) - jsonified_request["parent"] = "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_section + ] = mock_rpc - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - 
).create_message._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "message_id", - "message_reply_option", - "request_id", - "thread_key", - ) - ) - jsonified_request.update(unset_fields) + request = {} + await client.create_section(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.create_section(request) - # Designate an appropriate value for the returned response. - return_value = gc_message.Message() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_create_section_async( + transport: str = "grpc_asyncio", request_type=gc_section.CreateSectionRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.create_message(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, + ) + ) + response = await client.create_section(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gc_section.CreateSectionRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, gc_section.Section) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION -def test_create_message_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.create_message._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "messageId", - "messageReplyOption", - "requestId", - "threadKey", - ) - ) - & set( - ( - "parent", - "message", - ) - ) - ) +@pytest.mark.asyncio +async def test_create_section_async_from_dict(): + await test_create_section_async(request_type=dict) -def test_create_message_rest_flattened(): +def test_create_section_field_headers(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gc_message.Message() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gc_section.CreateSectionRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - message=gc_message.Message(name="name_value"), - message_id="message_id_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_section), "__call__") as call: + call.return_value = gc_section.Section() + client.create_section(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_message(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*}/messages" % client.transport._host, args[1] - ) +@pytest.mark.asyncio +async def test_create_section_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_create_message_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gc_section.CreateSectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_section), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gc_section.Section()) + await client.create_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_section_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_section( + parent="parent_value", + section=gc_section.Section(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].section + mock_val = gc_section.Section(name="name_value") + assert arg == mock_val + + +def test_create_section_flattened_error(): + client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_message( - gc_message.CreateMessageRequest(), + client.create_section( + gc_section.CreateSectionRequest(), parent="parent_value", - message=gc_message.Message(name="name_value"), - message_id="message_id_value", + section=gc_section.Section(name="name_value"), ) -def test_list_messages_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_section_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gc_section.Section()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_section( + parent="parent_value", + section=gc_section.Section(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].section + mock_val = gc_section.Section(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_section_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_section( + gc_section.CreateSectionRequest(), + parent="parent_value", + section=gc_section.Section(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + section.DeleteSectionRequest, + dict, + ], +) +def test_delete_section(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = section.DeleteSectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_section_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = section.DeleteSectionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_section(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == section.DeleteSectionRequest( + name="name_value", + ) + + +def test_delete_section_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13988,255 +14191,315 @@ def test_list_messages_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_messages in client._transport._wrapped_methods + assert client._transport.delete_section in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_messages] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_section] = mock_rpc request = {} - client.list_messages(request) + client.delete_section(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_messages(request) + client.delete_section(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_messages_rest_required_fields(request_type=message.ListMessagesRequest): - transport_class = transports.ChatServiceRestTransport +@pytest.mark.asyncio +async def test_delete_section_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_section + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_messages._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_section + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_section(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_messages._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "show_deleted", - ) - ) - jsonified_request.update(unset_fields) + await client.delete_section(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_delete_section_async( + transport: str = "grpc_asyncio", request_type=section.DeleteSectionRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = message.ListMessagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_section(request) - # Convert return value to protobuf type - return_value = message.ListMessagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = section.DeleteSectionRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert response is None - response = client.list_messages(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_delete_section_async_from_dict(): + await test_delete_section_async(request_type=dict) -def test_list_messages_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_delete_section_field_headers(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_messages._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "showDeleted", - ) - ) - & set(("parent",)) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.DeleteSectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + call.return_value = None + client.delete_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_section_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = section.DeleteSectionRequest() + + request.name = "name_value" -def test_list_messages_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_section_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = message.ListMessagesResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_section( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1"} + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + +def test_delete_section_flattened_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_section( + section.DeleteSectionRequest(), + name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = message.ListMessagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_messages(**mock_args) +@pytest.mark.asyncio +async def test_delete_section_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_section( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*}/messages" % client.transport._host, args[1] - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_messages_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_delete_section_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_messages( - message.ListMessagesRequest(), - parent="parent_value", + await client.delete_section( + section.DeleteSectionRequest(), + name="name_value", ) -def test_list_messages_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + gc_section.UpdateSectionRequest, + dict, + ], +) +def test_update_section(request_type, transport: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - message.ListMessagesResponse( - messages=[ - message.Message(), - message.Message(), - message.Message(), - ], - next_page_token="abc", - ), - message.ListMessagesResponse( - messages=[], - next_page_token="def", - ), - message.ListMessagesResponse( - messages=[ - message.Message(), - ], - next_page_token="ghi", - ), - message.ListMessagesResponse( - messages=[ - message.Message(), - message.Message(), - ], - ), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, ) - # Two responses for two calls - response = response + response + response = client.update_section(request) - # Wrap the values into proper Response objs - response = tuple(message.ListMessagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gc_section.UpdateSectionRequest() + assert args[0] == request - sample_request = {"parent": "spaces/sample1"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gc_section.Section) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION - pager = client.list_messages(request=sample_request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, message.Message) for i in results) +def test_update_section_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - pages = list(client.list_messages(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gc_section.UpdateSectionRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_section(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gc_section.UpdateSectionRequest() -def test_list_memberships_rest_use_cached_wrapped_rpc(): + +def test_update_section_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14244,263 +14507,336 @@ def test_list_memberships_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_memberships in client._transport._wrapped_methods + assert client._transport.update_section in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_memberships] = ( - mock_rpc - ) - + client._transport._wrapped_methods[client._transport.update_section] = mock_rpc request = {} - client.list_memberships(request) + client.update_section(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_memberships(request) + client.update_section(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_memberships_rest_required_fields( - request_type=membership.ListMembershipsRequest, +@pytest.mark.asyncio +async def test_update_section_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ChatServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.update_section + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_memberships._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_section + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.update_section(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_memberships._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - "show_groups", - "show_invited", - "use_admin_access", - ) + await client.update_section(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_section_async( + transport: str = "grpc_asyncio", request_type=gc_section.UpdateSectionRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, + ) + ) + response = await client.update_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gc_section.UpdateSectionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gc_section.Section) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION + +@pytest.mark.asyncio +async def test_update_section_async_from_dict(): + await test_update_section_async(request_type=dict) + + +def test_update_section_field_headers(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = membership.ListMembershipsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gc_section.UpdateSectionRequest() - response_value = Response() - response_value.status_code = 200 + request.section.name = "name_value" - # Convert return value to protobuf type - return_value = membership.ListMembershipsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_section), "__call__") as call: + call.return_value = gc_section.Section() + client.update_section(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.list_memberships(request) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "section.name=name_value", + ) in kw["metadata"] - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_update_section_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_memberships_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gc_section.UpdateSectionRequest() + + request.section.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gc_section.Section()) + await client.update_section(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "section.name=name_value", + ) in kw["metadata"] + + +def test_update_section_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_memberships._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - "showGroups", - "showInvited", - "useAdminAccess", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_section( + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - & set(("parent",)) - ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].section + mock_val = gc_section.Section(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_memberships_rest_flattened(): +def test_update_section_flattened_error(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = membership.ListMembershipsResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_section( + gc_section.UpdateSectionRequest(), + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1"} - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_update_section_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = membership.ListMembershipsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gc_section.Section() - client.list_memberships(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gc_section.Section()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_section( + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*}/members" % client.transport._host, args[1] - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].section + mock_val = gc_section.Section(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_memberships_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_update_section_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_memberships( - membership.ListMembershipsRequest(), - parent="parent_value", + await client.update_section( + gc_section.UpdateSectionRequest(), + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_memberships_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + section.ListSectionsRequest, + dict, + ], +) +def test_list_sections(request_type, transport: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - membership.ListMembershipsResponse( - memberships=[ - membership.Membership(), - membership.Membership(), - membership.Membership(), - ], - next_page_token="abc", - ), - membership.ListMembershipsResponse( - memberships=[], - next_page_token="def", - ), - membership.ListMembershipsResponse( - memberships=[ - membership.Membership(), - ], - next_page_token="ghi", - ), - membership.ListMembershipsResponse( - memberships=[ - membership.Membership(), - membership.Membership(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - membership.ListMembershipsResponse.to_json(x) for x in response + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionsResponse( + next_page_token="next_page_token_value", ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response = client.list_sections(request) - sample_request = {"parent": "spaces/sample1"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = section.ListSectionsRequest() + assert args[0] == request - pager = client.list_memberships(request=sample_request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSectionsPager) + assert response.next_page_token == "next_page_token_value" - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, membership.Membership) for i in results) - pages = list(client.list_memberships(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_list_sections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = section.ListSectionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_sections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == section.ListSectionsRequest( + parent="parent_value", + page_token="page_token_value", + ) -def test_get_membership_rest_use_cached_wrapped_rpc(): +def test_list_sections_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14508,351 +14844,513 @@ def test_get_membership_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_membership in client._transport._wrapped_methods + assert client._transport.list_sections in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_membership] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_sections] = mock_rpc request = {} - client.get_membership(request) + client.list_sections(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_membership(request) + client.list_sections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_membership_rest_required_fields( - request_type=membership.GetMembershipRequest, +@pytest.mark.asyncio +async def test_list_sections_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ChatServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_sections + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_membership._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sections + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_sections(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_membership._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("use_admin_access",)) - jsonified_request.update(unset_fields) + await client.list_sections(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_list_sections_async( + transport: str = "grpc_asyncio", request_type=section.ListSectionsRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = membership.Membership() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_sections(request) - # Convert return value to protobuf type - return_value = membership.Membership.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = section.ListSectionsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.get_membership(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_sections_async_from_dict(): + await test_list_sections_async(request_type=dict) -def test_get_membership_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_sections_field_headers(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.ListSectionsRequest() + request.parent = "parent_value" -def test_get_membership_rest_flattened(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + call.return_value = section.ListSectionsResponse() + client.list_sections(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = membership.Membership() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/members/sample2"} + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = membership.Membership.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_list_sections_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - client.get_membership(**mock_args) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.ListSectionsRequest() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=spaces/*/members/*}" % client.transport._host, args[1] + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionsResponse() ) + await client.list_sections(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_get_membership_rest_flattened_error(transport: str = "rest"): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_sections_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sections_flattened_error(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_membership( - membership.GetMembershipRequest(), - name="name_value", + client.list_sections( + section.ListSectionsRequest(), + parent="parent_value", ) -def test_get_message_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_list_sections_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Ensure method has been cached - assert client._transport.get_message in client._transport._wrapped_methods + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionsResponse() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sections( + parent="parent_value", ) - client._transport._wrapped_methods[client._transport.get_message] = mock_rpc - - request = {} - client.get_message(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - client.get_message(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_sections_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_sections( + section.ListSectionsRequest(), + parent="parent_value", + ) -def test_get_message_rest_required_fields(request_type=message.GetMessageRequest): - transport_class = transports.ChatServiceRestTransport - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_list_sections_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_message._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + section.Section(), + ], + next_page_token="abc", + ), + section.ListSectionsResponse( + sections=[], + next_page_token="def", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + ], + next_page_token="ghi", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + ], + ), + RuntimeError, + ) - # verify required fields with default values are now present + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sections(request={}, retry=retry, timeout=timeout) - jsonified_request["name"] = "name_value" + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_message._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, section.Section) for i in results) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +def test_list_sections_pages(transport_name: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = message.Message() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + section.Section(), + ], + next_page_token="abc", + ), + section.ListSectionsResponse( + sections=[], + next_page_token="def", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + ], + next_page_token="ghi", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Convert return value to protobuf type - return_value = message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_list_sections_async_pager(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - response = 
client.get_message(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + section.Section(), + ], + next_page_token="abc", + ), + section.ListSectionsResponse( + sections=[], + next_page_token="def", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + ], + next_page_token="ghi", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + assert len(responses) == 6 + assert all(isinstance(i, section.Section) for i in responses) -def test_get_message_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_sections_async_pages(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_message._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + section.Section(), + ], + next_page_token="abc", + ), + section.ListSectionsResponse( + sections=[], + next_page_token="def", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + ], + next_page_token="ghi", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_get_message_rest_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + section.PositionSectionRequest, + dict, + ], +) +def test_position_section(request_type, transport: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = message.Message() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/messages/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = section.PositionSectionResponse() + response = client.position_section(request) - client.get_message(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = section.PositionSectionRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=spaces/*/messages/*}" % client.transport._host, args[1] - ) + # Establish that the response is the type that we expect. + assert isinstance(response, section.PositionSectionResponse) -def test_get_message_rest_flattened_error(transport: str = "rest"): +def test_position_section_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_message( - message.GetMessageRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = section.PositionSectionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.position_section(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == section.PositionSectionRequest( name="name_value", ) -def test_update_message_rest_use_cached_wrapped_rpc(): +def test_position_section_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14860,364 +15358,247 @@ def test_update_message_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_message in client._transport._wrapped_methods + assert client._transport.position_section in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_message] = mock_rpc - + client._transport._wrapped_methods[client._transport.position_section] = ( + mock_rpc + ) request = {} - client.update_message(request) + client.position_section(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_message(request) + client.position_section(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_message_rest_required_fields( - request_type=gc_message.UpdateMessageRequest, +@pytest.mark.asyncio +async def test_position_section_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ChatServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_message._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.position_section + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + 
mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.position_section + ] = mock_rpc - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_message._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) - jsonified_request.update(unset_fields) + request = {} + await client.position_section(request) - # verify required fields with non-default values are left alone + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.position_section(request) - # Designate an appropriate value for the returned response. - return_value = gc_message.Message() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "put", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_position_section_async( + transport: str = "grpc_asyncio", request_type=section.PositionSectionRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.update_message(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.PositionSectionResponse() + ) + response = await client.position_section(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = section.PositionSectionRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, section.PositionSectionResponse) -def test_update_message_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.update_message._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set( - ( - "message", - "updateMask", - ) - ) - ) +@pytest.mark.asyncio +async def test_position_section_async_from_dict(): + await test_position_section_async(request_type=dict) -def test_update_message_rest_flattened(): +def test_position_section_field_headers(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gc_message.Message() - - # get arguments that satisfy an http rule for this method - sample_request = {"message": {"name": "spaces/sample1/messages/sample2"}} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = section.PositionSectionRequest() - # get truthy value for each flattened field - mock_args = dict( - message=gc_message.Message(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) + request.name = "name_value" - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_message.Message.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + call.return_value = section.PositionSectionResponse() + client.position_section(request) - client.update_message(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{message.name=spaces/*/messages/*}" % client.transport._host, args[1] - ) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_message_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_position_section_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_message( - gc_message.UpdateMessageRequest(), - message=gc_message.Message(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_delete_message_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.PositionSectionRequest() - # Ensure method has been cached - assert client._transport.delete_message in client._transport._wrapped_methods + request.name = "name_value" - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.position_section), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.PositionSectionResponse() ) - client._transport._wrapped_methods[client._transport.delete_message] = mock_rpc - - request = {} - client.delete_message(request) + await client.position_section(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_message(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_message_rest_required_fields(request_type=message.DeleteMessageRequest): - transport_class = transports.ChatServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_message._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_message._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.parametrize( + "request_type", + [ + section.ListSectionItemsRequest, + dict, + ], +) +def test_list_section_items(request_type, transport: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_message(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionItemsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_section_items(request) -def test_delete_message_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = section.ListSectionItemsRequest() + assert args[0] == request - unset_fields = transport.delete_message._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSectionItemsPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_message_rest_flattened(): +def test_list_section_items_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/messages/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_message(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=spaces/*/messages/*}" % client.transport._host, args[1] - ) - - -def test_delete_message_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = section.ListSectionItemsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_message( - message.DeleteMessageRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_section_items(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == section.ListSectionItemsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) -def test_get_attachment_rest_use_cached_wrapped_rpc(): +def test_list_section_items_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15225,311 +15606,541 @@ def test_get_attachment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_attachment in client._transport._wrapped_methods + assert ( + client._transport.list_section_items in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_attachment] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_section_items] = ( + mock_rpc + ) request = {} - client.get_attachment(request) + client.list_section_items(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_attachment(request) + client.list_section_items(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_attachment_rest_required_fields( - request_type=attachment.GetAttachmentRequest, +@pytest.mark.asyncio +async def test_list_section_items_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ChatServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_section_items + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_section_items + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_section_items(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_section_items(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_list_section_items_async( + transport: str = "grpc_asyncio", request_type=section.ListSectionItemsRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = attachment.Attachment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionItemsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_section_items(request) - # Convert return value to protobuf type - return_value = attachment.Attachment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = section.ListSectionItemsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSectionItemsAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.get_attachment(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_section_items_async_from_dict(): + await test_list_section_items_async(request_type=dict) -def test_get_attachment_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_section_items_field_headers(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_attachment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.ListSectionItemsRequest() + request.parent = "parent_value" -def test_get_attachment_rest_flattened(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + call.return_value = section.ListSectionItemsResponse() + client.list_section_items(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_section_items_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = attachment.Attachment() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.ListSectionItemsRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionItemsResponse() ) - mock_args.update(sample_request) + await client.list_section_items(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = attachment.Attachment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_attachment(**mock_args) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_section_items_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionItemsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_section_items( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=spaces/*/messages/*/attachments/*}" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_get_attachment_rest_flattened_error(transport: str = "rest"): +def test_list_section_items_flattened_error(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_attachment( - attachment.GetAttachmentRequest(), - name="name_value", + client.list_section_items( + section.ListSectionItemsRequest(), + parent="parent_value", ) -def test_upload_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_list_section_items_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Ensure method has been cached - assert client._transport.upload_attachment in client._transport._wrapped_methods + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = section.ListSectionItemsResponse() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionItemsResponse() ) - client._transport._wrapped_methods[client._transport.upload_attachment] = ( - mock_rpc + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_section_items( + parent="parent_value", ) - request = {} - client.upload_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.upload_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_upload_attachment_rest_required_fields( - request_type=attachment.UploadAttachmentRequest, -): - transport_class = transports.ChatServiceRestTransport - request_init = {} - request_init["parent"] = "" - request_init["filename"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_list_section_items_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).upload_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_section_items( + section.ListSectionItemsRequest(), + parent="parent_value", + ) - # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["filename"] = "filename_value" +def test_list_section_items_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).upload_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + section.SectionItem(), + ], + next_page_token="abc", + ), + section.ListSectionItemsResponse( + section_items=[], + next_page_token="def", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + ], + next_page_token="ghi", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + ], + ), + RuntimeError, + ) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "filename" in jsonified_request - assert jsonified_request["filename"] == "filename_value" + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_section_items(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry 
+ assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, section.SectionItem) for i in results) + + +def test_list_section_items_pages(transport_name: str = "grpc"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = attachment.UploadAttachmentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + section.SectionItem(), + ], + next_page_token="abc", + ), + section.ListSectionItemsResponse( + section_items=[], + next_page_token="def", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + ], + next_page_token="ghi", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + ], + ), + RuntimeError, + ) + pages = list(client.list_section_items(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Convert return value to protobuf type - return_value = attachment.UploadAttachmentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_list_section_items_async_pager(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - response = client.upload_attachment(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + section.SectionItem(), + ], + next_page_token="abc", + ), + section.ListSectionItemsResponse( + section_items=[], + next_page_token="def", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + ], + next_page_token="ghi", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_section_items( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + assert len(responses) == 6 + assert all(isinstance(i, section.SectionItem) for i in responses) -def test_upload_attachment_rest_unset_required_fields(): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_section_items_async_pages(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.upload_attachment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "filename", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + section.SectionItem(), + ], + next_page_token="abc", + ), + section.ListSectionItemsResponse( + section_items=[], + next_page_token="def", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + ], + next_page_token="ghi", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_section_items(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + section.MoveSectionItemRequest, + dict, + ], +) +def test_move_section_item(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = section.MoveSectionItemResponse() + response = client.move_section_item(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = section.MoveSectionItemRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, section.MoveSectionItemResponse) + + +def test_move_section_item_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = section.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.move_section_item(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == section.MoveSectionItemRequest( + name="name_value", + target_section="target_section_value", + ) + + +def test_move_section_item_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -15537,90 +16148,271 @@ def test_list_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_spaces in client._transport._wrapped_methods + assert client._transport.move_section_item in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc - + client._transport._wrapped_methods[client._transport.move_section_item] = ( + mock_rpc + ) request = {} - client.list_spaces(request) + client.move_section_item(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_spaces(request) + client.move_section_item(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_spaces_rest_pager(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +@pytest.mark.asyncio +async def test_move_section_item_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - space.ListSpacesResponse( - spaces=[ - space.Space(), - space.Space(), - space.Space(), - ], - next_page_token="abc", - ), - space.ListSpacesResponse( - spaces=[], - next_page_token="def", - ), - space.ListSpacesResponse( - spaces=[ - space.Space(), - ], - next_page_token="ghi", - ), - space.ListSpacesResponse( - spaces=[ - space.Space(), - space.Space(), - ], - ), + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.move_section_item + in client._client._transport._wrapped_methods ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(space.ListSpacesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.move_section_item + ] = mock_rpc - sample_request = {} + request = {} + await client.move_section_item(request) - pager = client.list_spaces(request=sample_request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, space.Space) for i in results) + await client.move_section_item(request) - pages = list(client.list_spaces(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_search_spaces_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_move_section_item_async( + transport: str = "grpc_asyncio", request_type=section.MoveSectionItemRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.MoveSectionItemResponse() + ) + response = await client.move_section_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = section.MoveSectionItemRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, section.MoveSectionItemResponse) + + +@pytest.mark.asyncio +async def test_move_section_item_async_from_dict(): + await test_move_section_item_async(request_type=dict) + + +def test_move_section_item_field_headers(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.MoveSectionItemRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + call.return_value = section.MoveSectionItemResponse() + client.move_section_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_move_section_item_field_headers_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = section.MoveSectionItemRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.MoveSectionItemResponse() + ) + await client.move_section_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_move_section_item_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = section.MoveSectionItemResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.move_section_item( + name="name_value", + target_section="target_section_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].target_section + mock_val = "target_section_value" + assert arg == mock_val + + +def test_move_section_item_flattened_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.move_section_item( + section.MoveSectionItemRequest(), + name="name_value", + target_section="target_section_value", + ) + + +@pytest.mark.asyncio +async def test_move_section_item_flattened_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = section.MoveSectionItemResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.MoveSectionItemResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.move_section_item( + name="name_value", + target_section="target_section_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].target_section + mock_val = "target_section_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_move_section_item_flattened_error_async(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.move_section_item( + section.MoveSectionItemRequest(), + name="name_value", + target_section="target_section_value", + ) + + +def test_create_message_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15634,33 +16426,35 @@ def test_search_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_spaces in client._transport._wrapped_methods + assert client._transport.create_message in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc + client._transport._wrapped_methods[client._transport.create_message] = mock_rpc request = {} - client.search_spaces(request) + client.create_message(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.search_spaces(request) + client.create_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): +def test_create_message_rest_required_fields( + request_type=gc_message.CreateMessageRequest, +): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["query"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15668,37 +16462,33 @@ def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesReque ) # verify fields with default values are dropped - assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_spaces._get_unset_required_fields(jsonified_request) + ).create_message._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "query" in jsonified_request - assert jsonified_request["query"] == request_init["query"] - jsonified_request["query"] = "query_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_spaces._get_unset_required_fields(jsonified_request) + ).create_message._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "order_by", - "page_size", - "page_token", - "query", - "use_admin_access", + "message_id", + "message_reply_option", + "request_id", + "thread_key", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15707,7 +16497,7 @@ def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = space.SearchSpacesResponse() + return_value = gc_message.Message() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15719,117 +16509,115 @@ def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.SearchSpacesResponse.pb(return_value) + return_value = gc_message.Message.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_spaces(request) + response = client.create_message(request) - expected_params = [ - ( - "query", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + 
expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_search_spaces_rest_unset_required_fields(): +def test_create_message_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_spaces._get_unset_required_fields({}) + unset_fields = transport.create_message._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "orderBy", - "pageSize", - "pageToken", - "query", - "useAdminAccess", + "messageId", + "messageReplyOption", + "requestId", + "threadKey", + ) + ) + & set( + ( + "parent", + "message", ) ) - & set(("query",)) ) -def test_search_spaces_rest_pager(transport: str = "rest"): +def test_create_message_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - space.SearchSpacesResponse( - spaces=[ - space.Space(), - space.Space(), - space.Space(), - ], - next_page_token="abc", - ), - space.SearchSpacesResponse( - spaces=[], - next_page_token="def", - ), - space.SearchSpacesResponse( - spaces=[ - space.Space(), - ], - next_page_token="ghi", - ), - space.SearchSpacesResponse( - spaces=[ - space.Space(), - space.Space(), - ], - ), + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gc_message.Message() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + message=gc_message.Message(name="name_value"), + message_id="message_id_value", ) - # Two responses for two calls - response = response + response + mock_args.update(sample_request) - # Wrap the values into proper Response objs - response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_message.Message.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - sample_request = {} + client.create_message(**mock_args) - pager = client.search_spaces(request=sample_request) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=spaces/*}/messages" % client.transport._host, args[1] + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, space.Space) for i in results) - pages = list(client.search_spaces(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_message_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_message( + gc_message.CreateMessageRequest(), + parent="parent_value", + message=gc_message.Message(name="name_value"), + message_id="message_id_value", + ) -def test_get_space_rest_use_cached_wrapped_rpc(): +def test_list_messages_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15843,33 +16631,33 @@ def test_get_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_space in client._transport._wrapped_methods + assert client._transport.list_messages in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_space] = mock_rpc + client._transport._wrapped_methods[client._transport.list_messages] = mock_rpc request = {} - client.get_space(request) + client.list_messages(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_space(request) + client.list_messages(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): +def test_list_messages_rest_required_fields(request_type=message.ListMessagesRequest): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15880,23 +16668,31 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space._get_unset_required_fields(jsonified_request) + ).list_messages._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space._get_unset_required_fields(jsonified_request) + ).list_messages._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("use_admin_access",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "show_deleted", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15905,7 +16701,7 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = space.Space() + return_value = message.ListMessagesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15926,30 +16722,41 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = message.ListMessagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space(request) + response = client.list_messages(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_space_rest_unset_required_fields(): +def test_list_messages_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) + unset_fields = transport.list_messages._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "showDeleted", + ) + ) + & set(("parent",)) + ) -def test_get_space_rest_flattened(): +def test_list_messages_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15958,14 +16765,14 @@ def test_get_space_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.Space() + return_value = message.ListMessagesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1"} + sample_request = {"parent": "spaces/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -15973,24 +16780,24 @@ def test_get_space_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = message.ListMessagesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space(**mock_args) + client.list_messages(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=spaces/*}" % client.transport._host, args[1] + "%s/v1/{parent=spaces/*}/messages" % client.transport._host, args[1] ) -def test_get_space_rest_flattened_error(transport: str = "rest"): +def test_list_messages_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15999,52 +16806,118 @@ def test_get_space_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_space( - space.GetSpaceRequest(), - name="name_value", + client.list_messages( + message.ListMessagesRequest(), + parent="parent_value", ) -def test_create_space_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_list_messages_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + message.ListMessagesResponse( + messages=[ + message.Message(), + message.Message(), + message.Message(), + ], + next_page_token="abc", + ), + message.ListMessagesResponse( + messages=[], + next_page_token="def", + ), + message.ListMessagesResponse( + messages=[ + message.Message(), + ], + next_page_token="ghi", + ), + message.ListMessagesResponse( + messages=[ + message.Message(), + message.Message(), + ], + ), ) + # Two responses for two calls + response = response + response - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Wrap the values into proper Response objs + response = tuple(message.ListMessagesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - # Ensure method has been cached - assert client._transport.create_space in client._transport._wrapped_methods + sample_request = {"parent": "spaces/sample1"} - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( + pager = client.list_messages(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, message.Message) for i in results) + + pages = list(client.list_messages(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_memberships_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_memberships in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_space] = mock_rpc + client._transport._wrapped_methods[client._transport.list_memberships] = ( + mock_rpc + ) request = {} - client.create_space(request) + client.list_memberships(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_space(request) + client.list_memberships(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_space_rest_required_fields(request_type=gc_space.CreateSpaceRequest): +def test_list_memberships_rest_required_fields( + request_type=membership.ListMembershipsRequest, +): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16055,19 +16928,32 @@ def test_create_space_rest_required_fields(request_type=gc_space.CreateSpaceRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_space._get_unset_required_fields(jsonified_request) + ).list_memberships._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).create_space._get_unset_required_fields(jsonified_request) + ).list_memberships._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "show_groups", + "show_invited", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16076,7 +16962,7 @@ def test_create_space_rest_required_fields(request_type=gc_space.CreateSpaceRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gc_space.Space() + return_value = membership.ListMembershipsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16088,40 +16974,51 @@ def test_create_space_rest_required_fields(request_type=gc_space.CreateSpaceRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = membership.ListMembershipsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_space(request) + response = client.list_memberships(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_space_rest_unset_required_fields(): +def test_list_memberships_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("space",))) + unset_fields = transport.list_memberships._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "showGroups", + "showInvited", + "useAdminAccess", + ) + ) + & set(("parent",)) + ) -def test_create_space_rest_flattened(): +def test_list_memberships_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16130,14 +17027,14 @@ def test_create_space_rest_flattened(): # Mock the http 
request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space.Space() + return_value = membership.ListMembershipsResponse() # get arguments that satisfy an http rule for this method - sample_request = {} + sample_request = {"parent": "spaces/sample1"} # get truthy value for each flattened field mock_args = dict( - space=gc_space.Space(name="name_value"), + parent="parent_value", ) mock_args.update(sample_request) @@ -16145,22 +17042,24 @@ def test_create_space_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = membership.ListMembershipsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_space(**mock_args) + client.list_memberships(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/spaces" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=spaces/*}/members" % client.transport._host, args[1] + ) -def test_create_space_rest_flattened_error(transport: str = "rest"): +def test_list_memberships_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16169,13 +17068,76 @@ def test_create_space_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_space( - gc_space.CreateSpaceRequest(), - space=gc_space.Space(name="name_value"), + client.list_memberships( + membership.ListMembershipsRequest(), + parent="parent_value", ) -def test_set_up_space_rest_use_cached_wrapped_rpc(): +def test_list_memberships_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + membership.ListMembershipsResponse( + memberships=[ + membership.Membership(), + membership.Membership(), + membership.Membership(), + ], + next_page_token="abc", + ), + membership.ListMembershipsResponse( + memberships=[], + next_page_token="def", + ), + membership.ListMembershipsResponse( + memberships=[ + membership.Membership(), + ], + next_page_token="ghi", + ), + membership.ListMembershipsResponse( + memberships=[ + membership.Membership(), + membership.Membership(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + membership.ListMembershipsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "spaces/sample1"} + + pager = client.list_memberships(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, membership.Membership) for i in results) + + pages = 
list(client.list_memberships(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_membership_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16189,32 +17151,35 @@ def test_set_up_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.set_up_space in client._transport._wrapped_methods + assert client._transport.get_membership in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.set_up_space] = mock_rpc + client._transport._wrapped_methods[client._transport.get_membership] = mock_rpc request = {} - client.set_up_space(request) + client.get_membership(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.set_up_space(request) + client.get_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_up_space_rest_required_fields(request_type=space_setup.SetUpSpaceRequest): +def test_get_membership_rest_required_fields( + request_type=membership.GetMembershipRequest, +): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16225,17 +17190,23 @@ def test_set_up_space_rest_required_fields(request_type=space_setup.SetUpSpaceRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_up_space._get_unset_required_fields(jsonified_request) + ).get_membership._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_up_space._get_unset_required_fields(jsonified_request) + ).get_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16244,7 +17215,7 @@ def test_set_up_space_rest_required_fields(request_type=space_setup.SetUpSpaceRe request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = space.Space() + return_value = membership.Membership() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16256,79 +17227,135 @@ def test_set_up_space_rest_required_fields(request_type=space_setup.SetUpSpaceRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_up_space(request) + response = client.get_membership(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_up_space_rest_unset_required_fields(): +def test_get_membership_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_up_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("space",))) + unset_fields = transport.get_membership._get_unset_required_fields({}) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) -def test_update_space_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - 
client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_get_membership_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = membership.Membership() - # Ensure method has been cached - assert client._transport.update_space in client._transport._wrapped_methods + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/members/sample2"} - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - client._transport._wrapped_methods[client._transport.update_space] = mock_rpc - - request = {} - client.update_space(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + mock_args.update(sample_request) - client.update_space(request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = membership.Membership.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_membership(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=spaces/*/members/*}" % client.transport._host, args[1] + ) + + +def test_get_membership_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_membership( + membership.GetMembershipRequest(), + name="name_value", + ) + + +def test_get_message_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_message] = mock_rpc + + request = {} + client.get_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequest): +def test_get_message_rest_required_fields(request_type=message.GetMessageRequest): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16339,24 +17366,21 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_space._get_unset_required_fields(jsonified_request) + ).get_message._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_space._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "update_mask", - "use_admin_access", - ) - ) + ).get_message._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16365,7 +17389,7 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gc_space.Space() + return_value = message.Message() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16377,53 +17401,39 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = message.Message.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space(request) + response = client.get_message(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_space_rest_unset_required_fields(): +def test_get_message_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "updateMask", - "useAdminAccess", - ) - ) - & set( - ( - "space", - "updateMask", - ) - ) - ) + unset_fields = transport.get_message._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_space_rest_flattened(): +def test_get_message_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16432,15 +17442,14 @@ def 
test_update_space_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space.Space() + return_value = message.Message() # get arguments that satisfy an http rule for this method - sample_request = {"space": {"name": "spaces/sample1"}} + sample_request = {"name": "spaces/sample1/messages/sample2"} # get truthy value for each flattened field mock_args = dict( - space=gc_space.Space(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -16448,24 +17457,24 @@ def test_update_space_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = message.Message.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space(**mock_args) + client.get_message(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{space.name=spaces/*}" % client.transport._host, args[1] + "%s/v1/{name=spaces/*/messages/*}" % client.transport._host, args[1] ) -def test_update_space_rest_flattened_error(transport: str = "rest"): +def test_get_message_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16474,14 +17483,13 @@ def test_update_space_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_space( - gc_space.UpdateSpaceRequest(), - space=gc_space.Space(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_message( + message.GetMessageRequest(), + name="name_value", ) -def test_delete_space_rest_use_cached_wrapped_rpc(): +def test_update_message_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16495,33 +17503,34 @@ def test_delete_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_space in client._transport._wrapped_methods + assert client._transport.update_message in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_space] = mock_rpc + client._transport._wrapped_methods[client._transport.update_message] = mock_rpc request = {} - client.delete_space(request) + client.update_message(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_space(request) + client.update_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest): +def test_update_message_rest_required_fields( + request_type=gc_message.UpdateMessageRequest, +): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16532,23 +17541,24 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_space._get_unset_required_fields(jsonified_request) + ).update_message._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_space._get_unset_required_fields(jsonified_request) + ).update_message._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("use_admin_access",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16557,7 +17567,7 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = gc_message.Message() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16569,36 +17579,53 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "put", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = gc_message.Message.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_space(request) + response = client.update_message(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_space_rest_unset_required_fields(): +def test_update_message_rest_unset_required_fields(): transport = 
transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) + unset_fields = transport.update_message._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + ) + ) + & set( + ( + "message", + "updateMask", + ) + ) + ) -def test_delete_space_rest_flattened(): +def test_update_message_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16607,37 +17634,40 @@ def test_delete_space_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = gc_message.Message() # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1"} + sample_request = {"message": {"name": "spaces/sample1/messages/sample2"}} # get truthy value for each flattened field mock_args = dict( - name="name_value", + message=gc_message.Message(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = gc_message.Message.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_space(**mock_args) + client.update_message(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=spaces/*}" % client.transport._host, args[1] + "%s/v1/{message.name=spaces/*/messages/*}" % client.transport._host, args[1] ) -def test_delete_space_rest_flattened_error(transport: str = "rest"): +def test_update_message_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16646,13 +17676,14 @@ def test_delete_space_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_space( - space.DeleteSpaceRequest(), - name="name_value", + client.update_message( + gc_message.UpdateMessageRequest(), + message=gc_message.Message(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_complete_import_space_rest_use_cached_wrapped_rpc(): +def test_delete_message_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16666,36 +17697,29 @@ def test_complete_import_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.complete_import_space - in client._transport._wrapped_methods - ) + assert client._transport.delete_message in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.complete_import_space] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.delete_message] = mock_rpc request = {} - client.complete_import_space(request) + client.delete_message(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.complete_import_space(request) + client.delete_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_complete_import_space_rest_required_fields( - request_type=space.CompleteImportSpaceRequest, -): +def test_delete_message_rest_required_fields(request_type=message.DeleteMessageRequest): transport_class = transports.ChatServiceRestTransport request_init = {} @@ -16710,7 +17734,7 @@ def test_complete_import_space_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).complete_import_space._get_unset_required_fields(jsonified_request) + ).delete_message._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16719,7 +17743,9 @@ def test_complete_import_space_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).complete_import_space._get_unset_required_fields(jsonified_request) + ).delete_message._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16733,7 +17759,7 @@ def test_complete_import_space_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = space.CompleteImportSpaceResponse() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16745,40 +17771,90 @@ def test_complete_import_space_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = space.CompleteImportSpaceResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.complete_import_space(request) + response = client.delete_message(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_complete_import_space_rest_unset_required_fields(): +def test_delete_message_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.complete_import_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_message._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -def test_find_direct_message_rest_use_cached_wrapped_rpc(): +def test_delete_message_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and 
fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/messages/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_message(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=spaces/*/messages/*}" % client.transport._host, args[1] + ) + + +def test_delete_message_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_message( + message.DeleteMessageRequest(), + name="name_value", + ) + + +def test_get_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16792,34 +17868,30 @@ def test_find_direct_message_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.find_direct_message in client._transport._wrapped_methods - ) + assert client._transport.get_attachment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.find_direct_message] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.get_attachment] = mock_rpc request = {} - client.find_direct_message(request) + client.get_attachment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.find_direct_message(request) + client.get_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_find_direct_message_rest_required_fields( - request_type=space.FindDirectMessageRequest, +def test_get_attachment_rest_required_fields( + request_type=attachment.GetAttachmentRequest, ): transport_class = transports.ChatServiceRestTransport @@ -16832,24 +17904,19 @@ def test_find_direct_message_rest_required_fields( ) # verify fields with default values are dropped - assert "name" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).find_direct_message._get_unset_required_fields(jsonified_request) + ).get_attachment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "name" in jsonified_request - assert jsonified_request["name"] == request_init["name"] jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).find_direct_message._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("name",)) + ).get_attachment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16863,7 +17930,7 @@ def test_find_direct_message_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = space.Space() + return_value = attachment.Attachment() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16884,36 +17951,87 @@ def test_find_direct_message_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = attachment.Attachment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.find_direct_message(request) + response = client.get_attachment(request) - expected_params = [ - ( - "name", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_find_direct_message_rest_unset_required_fields(): +def test_get_attachment_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.find_direct_message._get_unset_required_fields({}) - assert set(unset_fields) == (set(("name",)) & set(("name",))) + unset_fields = transport.get_attachment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_membership_rest_use_cached_wrapped_rpc(): +def test_get_attachment_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = attachment.Attachment() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = attachment.Attachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_attachment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=spaces/*/messages/*/attachments/*}" % client.transport._host, + args[1], + ) + + +def test_get_attachment_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_attachment( + attachment.GetAttachmentRequest(), + name="name_value", + ) + + +def test_upload_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16927,37 +18045,38 @@ def test_create_membership_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_membership in client._transport._wrapped_methods + assert client._transport.upload_attachment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_membership] = ( + client._transport._wrapped_methods[client._transport.upload_attachment] = ( mock_rpc ) request = {} - client.create_membership(request) + client.upload_attachment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_membership(request) + client.upload_attachment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_membership_rest_required_fields( - request_type=gc_membership.CreateMembershipRequest, +def test_upload_attachment_rest_required_fields( + request_type=attachment.UploadAttachmentRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filename"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16968,23 +18087,24 @@ def test_create_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_membership._get_unset_required_fields(jsonified_request) + ).upload_attachment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filename"] = "filename_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_membership._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("use_admin_access",)) + ).upload_attachment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filename" in jsonified_request + assert jsonified_request["filename"] == "filename_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16993,7 +18113,7 @@ def test_create_membership_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gc_membership.Membership() + return_value = attachment.UploadAttachmentResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17015,96 +18135,135 @@ def test_create_membership_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) + return_value = attachment.UploadAttachmentResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_membership(request) + response = client.upload_attachment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_membership_rest_unset_required_fields(): +def test_upload_attachment_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_membership._get_unset_required_fields({}) + 
unset_fields = transport.upload_attachment._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("useAdminAccess",)) + set(()) & set( ( "parent", - "membership", + "filename", ) ) ) -def test_create_membership_rest_flattened(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gc_membership.Membership() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1"} + # Ensure method has been cached + assert client._transport.list_spaces in client._transport._wrapped_methods - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - membership=gc_membership.Membership(name="name_value"), + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) - mock_args.update(sample_request) + client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + request = {} + client.list_spaces(request) - client.create_membership(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*}/members" % client.transport._host, args[1] - ) + client.list_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_create_membership_rest_flattened_error(transport: str = "rest"): +def test_list_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_membership( - gc_membership.CreateMembershipRequest(), - parent="parent_value", - membership=gc_membership.Membership(name="name_value"), + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + pager = client.list_spaces(request=sample_request) -def test_update_membership_rest_use_cached_wrapped_rpc(): + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = list(client.list_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_search_spaces_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17118,36 +18277,33 @@ def test_update_membership_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_membership in client._transport._wrapped_methods + assert client._transport.search_spaces in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( 
"foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_membership] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc request = {} - client.update_membership(request) + client.search_spaces(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_membership(request) + client.search_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_membership_rest_required_fields( - request_type=gc_membership.UpdateMembershipRequest, -): +def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["query"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17155,27 +18311,37 @@ def test_update_membership_rest_required_fields( ) # verify fields with default values are dropped + assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_membership._get_unset_required_fields(jsonified_request) + ).search_spaces._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_membership._get_unset_required_fields(jsonified_request) + ).search_spaces._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "update_mask", + "order_by", + "page_size", + "page_token", + "query", "use_admin_access", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17184,7 +18350,7 @@ def test_update_membership_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gc_membership.Membership() + return_value = space.SearchSpacesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17196,112 +18362,117 @@ def test_update_membership_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) + return_value = space.SearchSpacesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_membership(request) + response = client.search_spaces(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_membership_rest_unset_required_fields(): 
+def test_search_spaces_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_membership._get_unset_required_fields({}) + unset_fields = transport.search_spaces._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", + "orderBy", + "pageSize", + "pageToken", + "query", "useAdminAccess", ) ) - & set( - ( - "membership", - "updateMask", - ) - ) + & set(("query",)) ) -def test_update_membership_rest_flattened(): +def test_search_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gc_membership.Membership() - - # get arguments that satisfy an http rule for this method - sample_request = {"membership": {"name": "spaces/sample1/members/sample2"}} - - # get truthy value for each flattened field - mock_args = dict( - membership=gc_membership.Membership(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Two responses for two calls + response = response + response - client.update_membership(**mock_args) + # Wrap the values into proper Response objs + response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{membership.name=spaces/*/members/*}" % client.transport._host, - args[1], - ) + sample_request = {} + pager = client.search_spaces(request=sample_request) -def test_update_membership_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_membership( - gc_membership.UpdateMembershipRequest(), - membership=gc_membership.Membership(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + pages = list(client.search_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_membership_rest_use_cached_wrapped_rpc(): +def test_get_space_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17315,33 +18486,29 @@ def test_delete_membership_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_membership in client._transport._wrapped_methods + assert client._transport.get_space in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_membership] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.get_space] = mock_rpc request = {} - client.delete_membership(request) + client.get_space(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_membership(request) + client.get_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_membership_rest_required_fields( - request_type=membership.DeleteMembershipRequest, -): +def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): transport_class = transports.ChatServiceRestTransport request_init = {} @@ -17356,7 +18523,7 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_membership._get_unset_required_fields(jsonified_request) + ).get_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17365,7 +18532,7 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_membership._get_unset_required_fields(jsonified_request) + ).get_space._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) @@ -17381,7 +18548,7 @@ def test_delete_membership_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = membership.Membership() + return_value = space.Space() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17393,7 +18560,7 @@ def test_delete_membership_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -17402,30 +18569,30 @@ def test_delete_membership_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = membership.Membership.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_membership(request) + response = client.get_space(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_membership_rest_unset_required_fields(): +def test_get_space_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_membership._get_unset_required_fields({}) + unset_fields = transport.get_space._get_unset_required_fields({}) assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) -def test_delete_membership_rest_flattened(): +def test_get_space_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17434,10 +18601,10 @@ def test_delete_membership_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = membership.Membership() + return_value = space.Space() # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/members/sample2"} + sample_request = {"name": "spaces/sample1"} # get truthy value for each flattened field mock_args = dict( @@ -17449,24 +18616,24 @@ def test_delete_membership_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = membership.Membership.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_membership(**mock_args) + client.get_space(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=spaces/*/members/*}" % client.transport._host, args[1] + "%s/v1/{name=spaces/*}" % client.transport._host, args[1] ) -def test_delete_membership_rest_flattened_error(transport: str = "rest"): +def test_get_space_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17475,13 +18642,13 @@ def test_delete_membership_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_membership( - membership.DeleteMembershipRequest(), + client.get_space( + space.GetSpaceRequest(), name="name_value", ) -def test_create_reaction_rest_use_cached_wrapped_rpc(): +def test_create_space_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17495,35 +18662,32 @@ def test_create_reaction_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_reaction in client._transport._wrapped_methods + assert client._transport.create_space in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_reaction] = mock_rpc + client._transport._wrapped_methods[client._transport.create_space] = mock_rpc request = {} - client.create_reaction(request) + client.create_space(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_reaction(request) + client.create_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_reaction_rest_required_fields( - request_type=gc_reaction.CreateReactionRequest, -): +def test_create_space_rest_required_fields(request_type=gc_space.CreateSpaceRequest): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17534,21 +18698,19 @@ def test_create_reaction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_reaction._get_unset_required_fields(jsonified_request) + ).create_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_reaction._get_unset_required_fields(jsonified_request) + ).create_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17557,7 +18719,7 @@ def test_create_reaction_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gc_reaction.Reaction() + return_value = gc_space.Space() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17579,38 +18741,30 @@ def test_create_reaction_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_reaction.Reaction.pb(return_value) + return_value = gc_space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_reaction(request) + response = client.create_space(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_reaction_rest_unset_required_fields(): +def test_create_space_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_reaction._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "reaction", - ) - ) - ) + unset_fields = transport.create_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("space",))) -def test_create_reaction_rest_flattened(): +def test_create_space_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17619,15 +18773,14 @@ def test_create_reaction_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gc_reaction.Reaction() + return_value = gc_space.Space() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1/messages/sample2"} + sample_request = {} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - reaction=gc_reaction.Reaction(name="name_value"), + space=gc_space.Space(name="name_value"), ) mock_args.update(sample_request) @@ -17635,25 +18788,22 @@ def test_create_reaction_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_reaction.Reaction.pb(return_value) + return_value = gc_space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_reaction(**mock_args) + client.create_space(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*/messages/*}/reactions" % client.transport._host, - args[1], - ) + assert path_template.validate("%s/v1/spaces" % client.transport._host, args[1]) -def test_create_reaction_rest_flattened_error(transport: str = "rest"): +def test_create_space_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17662,14 +18812,13 @@ def test_create_reaction_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_reaction( - gc_reaction.CreateReactionRequest(), - parent="parent_value", - reaction=gc_reaction.Reaction(name="name_value"), + client.create_space( + gc_space.CreateSpaceRequest(), + space=gc_space.Space(name="name_value"), ) -def test_list_reactions_rest_use_cached_wrapped_rpc(): +def test_set_up_space_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17683,35 +18832,32 @@ def test_list_reactions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_reactions in client._transport._wrapped_methods + assert client._transport.set_up_space in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_reactions] = mock_rpc + client._transport._wrapped_methods[client._transport.set_up_space] = mock_rpc request = {} - client.list_reactions(request) + client.set_up_space(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_reactions(request) + client.set_up_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_reactions_rest_required_fields( - request_type=reaction.ListReactionsRequest, -): +def test_set_up_space_rest_required_fields(request_type=space_setup.SetUpSpaceRequest): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17722,29 +18868,17 @@ def test_list_reactions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_reactions._get_unset_required_fields(jsonified_request) + ).set_up_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_reactions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).set_up_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17753,7 +18887,7 @@ def test_list_reactions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = reaction.ListReactionsResponse() + return_value = space.Space() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17765,166 +18899,232 @@ def test_list_reactions_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.ListReactionsResponse.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_reactions(request) + response = client.set_up_space(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_reactions_rest_unset_required_fields(): +def test_set_up_space_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_reactions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.set_up_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("space",))) -def test_list_reactions_rest_flattened(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_space_rest_use_cached_wrapped_rpc(): + # Clients should use 
_prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reaction.ListReactionsResponse() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1/messages/sample2"} + # Ensure method has been cached + assert client._transport.update_space in client._transport._wrapped_methods - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - mock_args.update(sample_request) + client._transport._wrapped_methods[client._transport.update_space] = mock_rpc - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reaction.ListReactionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + request = {} + client.update_space(request) - client.list_reactions(**mock_args) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=spaces/*/messages/*}/reactions" % client.transport._host, - args[1], - ) + client.update_space(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_list_reactions_rest_flattened_error(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_reactions( - reaction.ListReactionsRequest(), - parent="parent_value", + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -def test_list_reactions_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = gc_space.Space() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reaction.ListReactionsResponse( - reactions=[ - reaction.Reaction(), - reaction.Reaction(), - reaction.Reaction(), - ], - next_page_token="abc", - ), - reaction.ListReactionsResponse( - reactions=[], - next_page_token="def", - ), - reaction.ListReactionsResponse( - reactions=[ - reaction.Reaction(), - ], - next_page_token="ghi", - ), - reaction.ListReactionsResponse( - reactions=[ - reaction.Reaction(), - reaction.Reaction(), - ], - ), + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gc_space.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_space(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_space_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_space._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) ) - # Two responses for two calls - response = response + response + & set( + ( + "space", + "updateMask", + ) + ) + ) - # Wrap the values into proper Response objs - response = tuple(reaction.ListReactionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "spaces/sample1/messages/sample2"} +def test_update_space_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - pager = client.list_reactions(request=sample_request) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gc_space.Space() - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reaction.Reaction) for i in results) + # get arguments that satisfy an http rule for this method + sample_request = {"space": {"name": "spaces/sample1"}} - pages = list(client.list_reactions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # get truthy value for each flattened field + mock_args = dict( + space=gc_space.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_space.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_space(**mock_args) -def test_delete_reaction_rest_use_cached_wrapped_rpc(): + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{space.name=spaces/*}" % client.transport._host, args[1] + ) + + +def test_update_space_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_space( + gc_space.UpdateSpaceRequest(), + space=gc_space.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_space_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17938,31 +19138,29 @@ def test_delete_reaction_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_reaction in client._transport._wrapped_methods + assert client._transport.delete_space in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_reaction] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_space] = mock_rpc request = {} - client.delete_reaction(request) + client.delete_space(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_reaction(request) + client.delete_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_reaction_rest_required_fields( - request_type=reaction.DeleteReactionRequest, -): +def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest): transport_class = transports.ChatServiceRestTransport request_init = {} @@ -17977,7 +19175,7 @@ def test_delete_reaction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_reaction._get_unset_required_fields(jsonified_request) + ).delete_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17986,7 +19184,9 @@ def test_delete_reaction_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_reaction._get_unset_required_fields(jsonified_request) + ).delete_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18025,23 +19225,23 @@ def test_delete_reaction_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_reaction(request) + response = client.delete_space(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_reaction_rest_unset_required_fields(): +def test_delete_space_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_reaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) -def test_delete_reaction_rest_flattened(): +def test_delete_space_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18053,7 +19253,7 @@ def test_delete_reaction_rest_flattened(): return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} + sample_request = {"name": "spaces/sample1"} # get truthy value for each flattened field mock_args = dict( @@ -18069,19 +19269,18 @@ def test_delete_reaction_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_reaction(**mock_args) + client.delete_space(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=spaces/*/messages/*/reactions/*}" % client.transport._host, - args[1], + "%s/v1/{name=spaces/*}" % client.transport._host, args[1] ) -def test_delete_reaction_rest_flattened_error(transport: str = "rest"): +def test_delete_space_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18090,13 +19289,13 @@ def test_delete_reaction_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_reaction( - reaction.DeleteReactionRequest(), + client.delete_space( + space.DeleteSpaceRequest(), name="name_value", ) -def test_create_custom_emoji_rest_use_cached_wrapped_rpc(): +def test_complete_import_space_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18111,7 +19310,8 @@ def test_create_custom_emoji_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_custom_emoji in client._transport._wrapped_methods + client._transport.complete_import_space + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -18119,29 +19319,30 @@ def test_create_custom_emoji_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_custom_emoji] = ( + client._transport._wrapped_methods[client._transport.complete_import_space] = ( mock_rpc ) request = {} - client.create_custom_emoji(request) + client.complete_import_space(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_custom_emoji(request) + client.complete_import_space(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_custom_emoji_rest_required_fields( - request_type=reaction.CreateCustomEmojiRequest, +def test_complete_import_space_rest_required_fields( + request_type=space.CompleteImportSpaceRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18152,17 +19353,21 @@ def test_create_custom_emoji_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_emoji._get_unset_required_fields(jsonified_request) + ).complete_import_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_custom_emoji._get_unset_required_fields(jsonified_request) + ).complete_import_space._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18171,7 +19376,7 @@ def test_create_custom_emoji_rest_required_fields( 
request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reaction.CustomEmoji() + return_value = space.CompleteImportSpaceResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18193,86 +19398,165 @@ def test_create_custom_emoji_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) + return_value = space.CompleteImportSpaceResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_custom_emoji(request) + response = client.complete_import_space(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_custom_emoji_rest_unset_required_fields(): +def test_complete_import_space_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_custom_emoji._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("customEmoji",))) + unset_fields = transport.complete_import_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_custom_emoji_rest_flattened(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_find_direct_message_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with 
mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reaction.CustomEmoji() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # get arguments that satisfy an http rule for this method - sample_request = {} + # Ensure method has been cached + assert ( + client._transport.find_direct_message in client._transport._wrapped_methods + ) - # get truthy value for each flattened field - mock_args = dict( - custom_emoji=reaction.CustomEmoji(name="name_value"), + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.find_direct_message] = ( + mock_rpc ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + request = {} + client.find_direct_message(request) - client.create_custom_emoji(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/customEmojis" % client.transport._host, args[1] - ) + client.find_direct_message(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_find_direct_message_rest_required_fields( + request_type=space.FindDirectMessageRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "name" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).find_direct_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "name" in jsonified_request + assert jsonified_request["name"] == request_init["name"] + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).find_direct_message._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_create_custom_emoji_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_custom_emoji( - reaction.CreateCustomEmojiRequest(), - custom_emoji=reaction.CustomEmoji(name="name_value"), - ) + # Designate an appropriate value for the returned response. + return_value = space.Space() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -def test_get_custom_emoji_rest_use_cached_wrapped_rpc(): + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.find_direct_message(request) + + expected_params = [ + ( + "name", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_find_direct_message_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.find_direct_message._get_unset_required_fields({}) + assert set(unset_fields) == (set(("name",)) & set(("name",))) + + +def test_create_membership_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18286,37 +19570,37 @@ def test_get_custom_emoji_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_custom_emoji in client._transport._wrapped_methods + assert client._transport.create_membership in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_custom_emoji] = ( + client._transport._wrapped_methods[client._transport.create_membership] = ( mock_rpc ) request = {} - client.get_custom_emoji(request) + client.create_membership(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_custom_emoji(request) + client.create_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_custom_emoji_rest_required_fields( - request_type=reaction.GetCustomEmojiRequest, +def test_create_membership_rest_required_fields( + request_type=gc_membership.CreateMembershipRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18327,21 +19611,23 @@ def test_get_custom_emoji_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_emoji._get_unset_required_fields(jsonified_request) + ).create_membership._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_custom_emoji._get_unset_required_fields(jsonified_request) + ).create_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18350,7 +19636,7 @@ def test_get_custom_emoji_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reaction.CustomEmoji() + return_value = gc_membership.Membership() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18362,39 +19648,48 @@ def test_get_custom_emoji_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) + return_value = gc_membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_custom_emoji(request) + response = client.create_membership(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_custom_emoji_rest_unset_required_fields(): +def test_create_membership_rest_unset_required_fields(): transport = 
transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_custom_emoji._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_membership._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("useAdminAccess",)) + & set( + ( + "parent", + "membership", + ) + ) + ) -def test_get_custom_emoji_rest_flattened(): +def test_create_membership_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18403,14 +19698,15 @@ def test_get_custom_emoji_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reaction.CustomEmoji() + return_value = gc_membership.Membership() # get arguments that satisfy an http rule for this method - sample_request = {"name": "customEmojis/sample1"} + sample_request = {"parent": "spaces/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + membership=gc_membership.Membership(name="name_value"), ) mock_args.update(sample_request) @@ -18418,24 +19714,24 @@ def test_get_custom_emoji_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) + return_value = gc_membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_custom_emoji(**mock_args) + client.create_membership(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=customEmojis/*}" % client.transport._host, args[1] + "%s/v1/{parent=spaces/*}/members" % client.transport._host, args[1] ) -def test_get_custom_emoji_rest_flattened_error(transport: str = "rest"): +def test_create_membership_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18444,13 +19740,14 @@ def test_get_custom_emoji_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_custom_emoji( - reaction.GetCustomEmojiRequest(), - name="name_value", + client.create_membership( + gc_membership.CreateMembershipRequest(), + parent="parent_value", + membership=gc_membership.Membership(name="name_value"), ) -def test_list_custom_emojis_rest_use_cached_wrapped_rpc(): +def test_update_membership_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18464,165 +19761,64 @@ def test_list_custom_emojis_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_custom_emojis in client._transport._wrapped_methods - ) + assert client._transport.update_membership in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_custom_emojis] = ( + client._transport._wrapped_methods[client._transport.update_membership] = ( mock_rpc ) request = {} - client.list_custom_emojis(request) + client.update_membership(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_custom_emojis(request) + client.update_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_custom_emojis_rest_pager(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reaction.ListCustomEmojisResponse( - custom_emojis=[ - reaction.CustomEmoji(), - reaction.CustomEmoji(), - reaction.CustomEmoji(), - ], - next_page_token="abc", - ), - reaction.ListCustomEmojisResponse( - custom_emojis=[], - next_page_token="def", - ), - reaction.ListCustomEmojisResponse( - custom_emojis=[ - reaction.CustomEmoji(), - ], - next_page_token="ghi", - ), - reaction.ListCustomEmojisResponse( - custom_emojis=[ - reaction.CustomEmoji(), - reaction.CustomEmoji(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(reaction.ListCustomEmojisResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {} - 
- pager = client.list_custom_emojis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reaction.CustomEmoji) for i in results) - - pages = list(client.list_custom_emojis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_delete_custom_emoji_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_custom_emoji in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_custom_emoji] = ( - mock_rpc - ) - - request = {} - client.delete_custom_emoji(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_custom_emoji(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_custom_emoji_rest_required_fields( - request_type=reaction.DeleteCustomEmojiRequest, -): - transport_class = transports.ChatServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_update_membership_rest_required_fields( + request_type=gc_membership.UpdateMembershipRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_emoji._get_unset_required_fields(jsonified_request) + ).update_membership._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_custom_emoji._get_unset_required_fields(jsonified_request) + ).update_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18631,7 +19827,7 @@ def test_delete_custom_emoji_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = gc_membership.Membership() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18643,36 +19839,53 @@ def test_delete_custom_emoji_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = gc_membership.Membership.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_custom_emoji(request) + response = client.update_membership(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_custom_emoji_rest_unset_required_fields(): +def test_update_membership_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.delete_custom_emoji._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_membership._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) + ) + & set( + ( + "membership", + "updateMask", + ) + ) + ) -def test_delete_custom_emoji_rest_flattened(): +def test_update_membership_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18681,37 +19894,41 @@ def test_delete_custom_emoji_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = gc_membership.Membership() # get arguments that satisfy an http rule for this method - sample_request = {"name": "customEmojis/sample1"} + sample_request = {"membership": {"name": "spaces/sample1/members/sample2"}} # get truthy value for each flattened field mock_args = dict( - name="name_value", + membership=gc_membership.Membership(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = gc_membership.Membership.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_custom_emoji(**mock_args) + client.update_membership(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=customEmojis/*}" % client.transport._host, args[1] + "%s/v1/{membership.name=spaces/*/members/*}" % client.transport._host, + args[1], ) -def test_delete_custom_emoji_rest_flattened_error(transport: str = "rest"): +def test_update_membership_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18720,13 +19937,14 @@ def test_delete_custom_emoji_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_custom_emoji( - reaction.DeleteCustomEmojiRequest(), - name="name_value", + client.update_membership( + gc_membership.UpdateMembershipRequest(), + membership=gc_membership.Membership(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_space_read_state_rest_use_cached_wrapped_rpc(): +def test_delete_membership_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18740,34 +19958,32 @@ def test_get_space_read_state_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_space_read_state in client._transport._wrapped_methods - ) + assert client._transport.delete_membership in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_space_read_state] = ( + client._transport._wrapped_methods[client._transport.delete_membership] = ( mock_rpc ) request = {} - client.get_space_read_state(request) + client.delete_membership(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_space_read_state(request) + client.delete_membership(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_space_read_state_rest_required_fields( - request_type=space_read_state.GetSpaceReadStateRequest, +def test_delete_membership_rest_required_fields( + request_type=membership.DeleteMembershipRequest, ): transport_class = transports.ChatServiceRestTransport @@ -18783,7 +19999,7 @@ def test_get_space_read_state_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_read_state._get_unset_required_fields(jsonified_request) + ).delete_membership._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -18792,7 +20008,9 @@ def test_get_space_read_state_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_read_state._get_unset_required_fields(jsonified_request) + ).delete_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18806,7 +20024,7 @@ def test_get_space_read_state_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = space_read_state.SpaceReadState() + return_value = membership.Membership() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18818,7 +20036,7 @@ def test_get_space_read_state_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -18827,30 +20045,30 @@ def test_get_space_read_state_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_read_state.SpaceReadState.pb(return_value) + return_value = membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_read_state(request) + response = client.delete_membership(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_space_read_state_rest_unset_required_fields(): +def test_delete_membership_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_space_read_state._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_membership._get_unset_required_fields({}) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) -def test_get_space_read_state_rest_flattened(): +def test_delete_membership_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18859,10 
+20077,10 @@ def test_get_space_read_state_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space_read_state.SpaceReadState() + return_value = membership.Membership() # get arguments that satisfy an http rule for this method - sample_request = {"name": "users/sample1/spaces/sample2/spaceReadState"} + sample_request = {"name": "spaces/sample1/members/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -18874,25 +20092,24 @@ def test_get_space_read_state_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_read_state.SpaceReadState.pb(return_value) + return_value = membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_read_state(**mock_args) + client.delete_membership(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=users/*/spaces/*/spaceReadState}" % client.transport._host, - args[1], + "%s/v1/{name=spaces/*/members/*}" % client.transport._host, args[1] ) -def test_get_space_read_state_rest_flattened_error(transport: str = "rest"): +def test_delete_membership_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18901,13 +20118,13 @@ def test_get_space_read_state_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_space_read_state( - space_read_state.GetSpaceReadStateRequest(), + client.delete_membership( + membership.DeleteMembershipRequest(), name="name_value", ) -def test_update_space_read_state_rest_use_cached_wrapped_rpc(): +def test_create_reaction_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18921,39 +20138,35 @@ def test_update_space_read_state_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_space_read_state - in client._transport._wrapped_methods - ) + assert client._transport.create_reaction in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.update_space_read_state - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_reaction] = mock_rpc request = {} - client.update_space_read_state(request) + client.create_reaction(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_space_read_state(request) + client.create_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_space_read_state_rest_required_fields( - request_type=gc_space_read_state.UpdateSpaceReadStateRequest, +def test_create_reaction_rest_required_fields( + request_type=gc_reaction.CreateReactionRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18964,19 +20177,21 @@ def test_update_space_read_state_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_space_read_state._get_unset_required_fields(jsonified_request) + ).create_reaction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_space_read_state._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + ).create_reaction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18985,7 +20200,7 @@ def test_update_space_read_state_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = gc_space_read_state.SpaceReadState() + return_value = gc_reaction.Reaction() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18997,7 +20212,7 @@ def test_update_space_read_state_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -19007,38 +20222,38 @@ def test_update_space_read_state_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space_read_state.SpaceReadState.pb(return_value) + return_value = gc_reaction.Reaction.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space_read_state(request) + response = client.create_reaction(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_space_read_state_rest_unset_required_fields(): +def test_create_reaction_rest_unset_required_fields(): 
transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_space_read_state._get_unset_required_fields({}) + unset_fields = transport.create_reaction._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set(()) & set( ( - "spaceReadState", - "updateMask", + "parent", + "reaction", ) ) ) -def test_update_space_read_state_rest_flattened(): +def test_create_reaction_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19047,17 +20262,15 @@ def test_update_space_read_state_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space_read_state.SpaceReadState() + return_value = gc_reaction.Reaction() # get arguments that satisfy an http rule for this method - sample_request = { - "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} - } + sample_request = {"parent": "spaces/sample1/messages/sample2"} # get truthy value for each flattened field mock_args = dict( - space_read_state=gc_space_read_state.SpaceReadState(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + reaction=gc_reaction.Reaction(name="name_value"), ) mock_args.update(sample_request) @@ -19065,26 +20278,25 @@ def test_update_space_read_state_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space_read_state.SpaceReadState.pb(return_value) + return_value = gc_reaction.Reaction.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - client.update_space_read_state(**mock_args) + client.create_reaction(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{space_read_state.name=users/*/spaces/*/spaceReadState}" - % client.transport._host, + "%s/v1/{parent=spaces/*/messages/*}/reactions" % client.transport._host, args[1], ) -def test_update_space_read_state_rest_flattened_error(transport: str = "rest"): +def test_create_reaction_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19093,14 +20305,14 @@ def test_update_space_read_state_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_space_read_state( - gc_space_read_state.UpdateSpaceReadStateRequest(), - space_read_state=gc_space_read_state.SpaceReadState(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_reaction( + gc_reaction.CreateReactionRequest(), + parent="parent_value", + reaction=gc_reaction.Reaction(name="name_value"), ) -def test_get_thread_read_state_rest_use_cached_wrapped_rpc(): +def test_list_reactions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19114,40 +20326,35 @@ def test_get_thread_read_state_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_thread_read_state - in client._transport._wrapped_methods - ) + assert client._transport.list_reactions in client._transport._wrapped_methods # Replace cached wrapped 
function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_thread_read_state] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.list_reactions] = mock_rpc request = {} - client.get_thread_read_state(request) + client.list_reactions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_thread_read_state(request) + client.list_reactions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_thread_read_state_rest_required_fields( - request_type=thread_read_state.GetThreadReadStateRequest, +def test_list_reactions_rest_required_fields( + request_type=reaction.ListReactionsRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19158,21 +20365,29 @@ def test_get_thread_read_state_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_thread_read_state._get_unset_required_fields(jsonified_request) + ).list_reactions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_thread_read_state._get_unset_required_fields(jsonified_request) + ).list_reactions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19181,7 +20396,7 @@ def test_get_thread_read_state_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = thread_read_state.ThreadReadState() + return_value = reaction.ListReactionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19202,30 +20417,39 @@ def test_get_thread_read_state_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = thread_read_state.ThreadReadState.pb(return_value) + return_value = reaction.ListReactionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_thread_read_state(request) + response = client.list_reactions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_thread_read_state_rest_unset_required_fields(): +def test_list_reactions_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_thread_read_state._get_unset_required_fields({}) - assert set(unset_fields) == 
(set(()) & set(("name",))) + unset_fields = transport.list_reactions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_get_thread_read_state_rest_flattened(): +def test_list_reactions_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19234,16 +20458,14 @@ def test_get_thread_read_state_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = thread_read_state.ThreadReadState() + return_value = reaction.ListReactionsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" - } + sample_request = {"parent": "spaces/sample1/messages/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -19251,26 +20473,25 @@ def test_get_thread_read_state_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = thread_read_state.ThreadReadState.pb(return_value) + return_value = reaction.ListReactionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_thread_read_state(**mock_args) + client.list_reactions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=users/*/spaces/*/threads/*/threadReadState}" - % client.transport._host, + "%s/v1/{parent=spaces/*/messages/*}/reactions" % client.transport._host, args[1], ) -def test_get_thread_read_state_rest_flattened_error(transport: str = "rest"): +def test_list_reactions_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19279,13 +20500,74 @@ def test_get_thread_read_state_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_thread_read_state( - thread_read_state.GetThreadReadStateRequest(), - name="name_value", + client.list_reactions( + reaction.ListReactionsRequest(), + parent="parent_value", ) -def test_get_space_event_rest_use_cached_wrapped_rpc(): +def test_list_reactions_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reaction.ListReactionsResponse( + reactions=[ + reaction.Reaction(), + reaction.Reaction(), + reaction.Reaction(), + ], + next_page_token="abc", + ), + reaction.ListReactionsResponse( + reactions=[], + next_page_token="def", + ), + reaction.ListReactionsResponse( + reactions=[ + reaction.Reaction(), + ], + next_page_token="ghi", + ), + reaction.ListReactionsResponse( + reactions=[ + reaction.Reaction(), + reaction.Reaction(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(reaction.ListReactionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "spaces/sample1/messages/sample2"} + + pager = client.list_reactions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reaction.Reaction) for i in results) + + pages = list(client.list_reactions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_reaction_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19299,30 +20581,30 @@ def test_get_space_event_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_space_event in client._transport._wrapped_methods + assert client._transport.delete_reaction in client._transport._wrapped_methods # Replace cached 
wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_space_event] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_reaction] = mock_rpc request = {} - client.get_space_event(request) + client.delete_reaction(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_space_event(request) + client.delete_reaction(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_space_event_rest_required_fields( - request_type=space_event.GetSpaceEventRequest, +def test_delete_reaction_rest_required_fields( + request_type=reaction.DeleteReactionRequest, ): transport_class = transports.ChatServiceRestTransport @@ -19338,7 +20620,7 @@ def test_get_space_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_event._get_unset_required_fields(jsonified_request) + ).delete_reaction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19347,7 +20629,7 @@ def test_get_space_event_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_event._get_unset_required_fields(jsonified_request) + ).delete_reaction._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19361,7 +20643,7 @@ def test_get_space_event_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = space_event.SpaceEvent() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19373,39 +20655,36 @@ def test_get_space_event_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = space_event.SpaceEvent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_event(request) + response = client.delete_reaction(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_space_event_rest_unset_required_fields(): +def test_delete_reaction_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_space_event._get_unset_required_fields({}) + unset_fields = transport.delete_reaction._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_space_event_rest_flattened(): +def test_delete_reaction_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19414,10 +20693,10 @@ def test_get_space_event_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space_event.SpaceEvent() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1/spaceEvents/sample2"} + sample_request = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} # get truthy value for each flattened field mock_args = dict( @@ -19428,25 +20707,24 @@ def test_get_space_event_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = space_event.SpaceEvent.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_event(**mock_args) + client.delete_reaction(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=spaces/*/spaceEvents/*}" % client.transport._host, args[1] + "%s/v1/{name=spaces/*/messages/*/reactions/*}" % client.transport._host, + args[1], ) -def test_get_space_event_rest_flattened_error(transport: str = "rest"): +def test_delete_reaction_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19455,13 +20733,13 @@ def test_get_space_event_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_space_event( - space_event.GetSpaceEventRequest(), + client.delete_reaction( + reaction.DeleteReactionRequest(), name="name_value", ) -def test_list_space_events_rest_use_cached_wrapped_rpc(): +def test_create_custom_emoji_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19475,38 +20753,38 @@ def test_list_space_events_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_space_events in client._transport._wrapped_methods + assert ( + client._transport.create_custom_emoji in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_space_events] = ( + client._transport._wrapped_methods[client._transport.create_custom_emoji] = ( mock_rpc ) request = {} - client.list_space_events(request) + client.create_custom_emoji(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_space_events(request) + client.create_custom_emoji(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_space_events_rest_required_fields( - request_type=space_event.ListSpaceEventsRequest, +def test_create_custom_emoji_rest_required_fields( + request_type=reaction.CreateCustomEmojiRequest, ): transport_class = transports.ChatServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -19514,38 +20792,20 @@ def test_list_space_events_rest_required_fields( ) # verify fields with default values are dropped - assert "filter" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_space_events._get_unset_required_fields(jsonified_request) + ).create_custom_emoji._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "filter" in jsonified_request - assert jsonified_request["filter"] == request_init["filter"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_space_events._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + ).create_custom_emoji._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -19554,7 +20814,7 @@ def test_list_space_events_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = space_event.ListSpaceEventsResponse() + return_value = reaction.CustomEmoji() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19566,59 +20826,40 @@ def test_list_space_events_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_event.ListSpaceEventsResponse.pb(return_value) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_space_events(request) + response = client.create_custom_emoji(request) - expected_params = [ - ( - "filter", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] 
actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_space_events_rest_unset_required_fields(): +def test_create_custom_emoji_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_space_events._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set( - ( - "parent", - "filter", - ) - ) - ) + unset_fields = transport.create_custom_emoji._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("customEmoji",))) -def test_list_space_events_rest_flattened(): +def test_create_custom_emoji_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19627,15 +20868,14 @@ def test_list_space_events_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space_event.ListSpaceEventsResponse() + return_value = reaction.CustomEmoji() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "spaces/sample1"} + sample_request = {} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - filter="filter_value", + custom_emoji=reaction.CustomEmoji(name="name_value"), ) mock_args.update(sample_request) @@ -19643,24 +20883,24 @@ def test_list_space_events_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_event.ListSpaceEventsResponse.pb(return_value) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_space_events(**mock_args) + client.create_custom_emoji(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=spaces/*}/spaceEvents" % client.transport._host, args[1] + "%s/v1/customEmojis" % client.transport._host, args[1] ) -def test_list_space_events_rest_flattened_error(transport: str = "rest"): +def test_create_custom_emoji_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19669,77 +20909,13 @@ def test_list_space_events_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_space_events( - space_event.ListSpaceEventsRequest(), - parent="parent_value", - filter="filter_value", - ) - - -def test_list_space_events_rest_pager(transport: str = "rest"): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - space_event.ListSpaceEventsResponse( - space_events=[ - space_event.SpaceEvent(), - space_event.SpaceEvent(), - space_event.SpaceEvent(), - ], - next_page_token="abc", - ), - space_event.ListSpaceEventsResponse( - space_events=[], - next_page_token="def", - ), - space_event.ListSpaceEventsResponse( - space_events=[ - space_event.SpaceEvent(), - ], - next_page_token="ghi", - ), - space_event.ListSpaceEventsResponse( - space_events=[ - space_event.SpaceEvent(), - space_event.SpaceEvent(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - space_event.ListSpaceEventsResponse.to_json(x) for x in response + client.create_custom_emoji( + reaction.CreateCustomEmojiRequest(), + custom_emoji=reaction.CustomEmoji(name="name_value"), ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "spaces/sample1"} - - pager = client.list_space_events(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, space_event.SpaceEvent) for i in results) - - pages = 
list(client.list_space_events(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_space_notification_setting_rest_use_cached_wrapped_rpc(): +def test_get_custom_emoji_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19753,35 +20929,32 @@ def test_get_space_notification_setting_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_space_notification_setting - in client._transport._wrapped_methods - ) + assert client._transport.get_custom_emoji in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_space_notification_setting - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_custom_emoji] = ( + mock_rpc + ) request = {} - client.get_space_notification_setting(request) + client.get_custom_emoji(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_space_notification_setting(request) + client.get_custom_emoji(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_space_notification_setting_rest_required_fields( - request_type=space_notification_setting.GetSpaceNotificationSettingRequest, +def test_get_custom_emoji_rest_required_fields( + request_type=reaction.GetCustomEmojiRequest, ): transport_class = transports.ChatServiceRestTransport @@ -19797,7 +20970,7 @@ def test_get_space_notification_setting_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_notification_setting._get_unset_required_fields(jsonified_request) + ).get_custom_emoji._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -19806,7 +20979,7 @@ def test_get_space_notification_setting_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_space_notification_setting._get_unset_required_fields(jsonified_request) + ).get_custom_emoji._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19820,7 +20993,7 @@ def test_get_space_notification_setting_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = space_notification_setting.SpaceNotificationSetting() + return_value = reaction.CustomEmoji() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -19841,34 +21014,30 @@ def test_get_space_notification_setting_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_notification_setting(request) + response = client.get_custom_emoji(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_space_notification_setting_rest_unset_required_fields(): +def test_get_custom_emoji_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_space_notification_setting._get_unset_required_fields( - {} - ) + unset_fields = transport.get_custom_emoji._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_space_notification_setting_rest_flattened(): +def test_get_custom_emoji_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -19877,12 +21046,10 @@ def test_get_space_notification_setting_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space_notification_setting.SpaceNotificationSetting() + return_value = reaction.CustomEmoji() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "users/sample1/spaces/sample2/spaceNotificationSetting" - } + sample_request = {"name": "customEmojis/sample1"} # get truthy value for each flattened field mock_args = dict( @@ -19894,28 +21061,24 @@ def test_get_space_notification_setting_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_notification_setting(**mock_args) + client.get_custom_emoji(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=users/*/spaces/*/spaceNotificationSetting}" - % client.transport._host, - args[1], + "%s/v1/{name=customEmojis/*}" % client.transport._host, args[1] ) -def test_get_space_notification_setting_rest_flattened_error(transport: str = "rest"): +def test_get_custom_emoji_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -19924,13 +21087,13 @@ def test_get_space_notification_setting_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_space_notification_setting( - space_notification_setting.GetSpaceNotificationSettingRequest(), + client.get_custom_emoji( + reaction.GetCustomEmojiRequest(), name="name_value", ) -def test_update_space_notification_setting_rest_use_cached_wrapped_rpc(): +def test_list_custom_emojis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -19945,8 +21108,7 @@ def test_update_space_notification_setting_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_space_notification_setting - in client._transport._wrapped_methods + client._transport.list_custom_emojis in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -19954,61 +21116,165 @@ def test_update_space_notification_setting_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_space_notification_setting - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_custom_emojis] = ( + mock_rpc + ) request = {} - client.update_space_notification_setting(request) + client.list_custom_emojis(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_space_notification_setting(request) + client.list_custom_emojis(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_space_notification_setting_rest_required_fields( - request_type=gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, -): - transport_class = transports.ChatServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_space_notification_setting._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_space_notification_setting._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - +def test_list_custom_emojis_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reaction.ListCustomEmojisResponse( + custom_emojis=[ + reaction.CustomEmoji(), + reaction.CustomEmoji(), + reaction.CustomEmoji(), + ], + next_page_token="abc", + ), + reaction.ListCustomEmojisResponse( + custom_emojis=[], + next_page_token="def", + ), + reaction.ListCustomEmojisResponse( + custom_emojis=[ + reaction.CustomEmoji(), + ], + next_page_token="ghi", + ), + reaction.ListCustomEmojisResponse( + custom_emojis=[ + reaction.CustomEmoji(), + reaction.CustomEmoji(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(reaction.ListCustomEmojisResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_custom_emojis(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reaction.CustomEmoji) for i in results) + + pages = list(client.list_custom_emojis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_custom_emoji_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + 
client._transport.delete_custom_emoji in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_custom_emoji] = ( + mock_rpc + ) + + request = {} + client.delete_custom_emoji(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_custom_emoji(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_custom_emoji_rest_required_fields( + request_type=reaction.DeleteCustomEmojiRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_custom_emoji._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_custom_emoji._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) # Designate an appropriate value for 
the returned response. - return_value = gc_space_notification_setting.SpaceNotificationSetting() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20020,52 +21286,36 @@ def test_update_space_notification_setting_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space_notification_setting(request) + response = client.delete_custom_emoji(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_space_notification_setting_rest_unset_required_fields(): +def test_delete_custom_emoji_rest_unset_required_fields(): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = ( - transport.update_space_notification_setting._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "spaceNotificationSetting", - "updateMask", - ) - ) - ) + unset_fields = transport.delete_custom_emoji._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_update_space_notification_setting_rest_flattened(): 
+def test_delete_custom_emoji_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20074,52 +21324,37 @@ def test_update_space_notification_setting_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space_notification_setting.SpaceNotificationSetting() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = { - "space_notification_setting": { - "name": "users/sample1/spaces/sample2/spaceNotificationSetting" - } - } + sample_request = {"name": "customEmojis/sample1"} # get truthy value for each flattened field mock_args = dict( - space_notification_setting=gc_space_notification_setting.SpaceNotificationSetting( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space_notification_setting(**mock_args) + client.delete_custom_emoji(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{space_notification_setting.name=users/*/spaces/*/spaceNotificationSetting}" - % client.transport._host, - args[1], + "%s/v1/{name=customEmojis/*}" % client.transport._host, args[1] ) -def test_update_space_notification_setting_rest_flattened_error( - transport: str = "rest", -): +def test_delete_custom_emoji_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20128,2141 +21363,5256 @@ def test_update_space_notification_setting_rest_flattened_error( # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_space_notification_setting( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest(), - space_notification_setting=gc_space_notification_setting.SpaceNotificationSetting( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_custom_emoji( + reaction.DeleteCustomEmojiRequest(), + name="name_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_get_space_read_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ChatServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ChatServiceClient( - client_options=options, - transport=transport, + # Ensure method has been cached + assert ( + client._transport.get_space_read_state in client._transport._wrapped_methods ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ChatServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ChatServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_space_read_state] = ( + mock_rpc ) + request = {} + client.get_space_read_state(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ChatServiceClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_space_read_state(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ChatServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.ChatServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_get_space_read_state_rest_required_fields( + request_type=space_read_state.GetSpaceReadStateRequest, +): + transport_class = transports.ChatServiceRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.ChatServiceGrpcTransport, - transports.ChatServiceGrpcAsyncIOTransport, - transports.ChatServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = ChatServiceClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).get_space_read_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_initialize_client_w_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space_read_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_message_empty_call_grpc(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.create_message), "__call__") as call: - call.return_value = gc_message.Message() - client.create_message(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_message.CreateMessageRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = space_read_state.SpaceReadState() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_messages_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = space_read_state.SpaceReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_messages), "__call__") as call: - call.return_value = message.ListMessagesResponse() - client.list_messages(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.ListMessagesRequest() + response = client.get_space_read_state(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_memberships_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_get_space_read_state_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_memberships), "__call__") as call: - call.return_value = membership.ListMembershipsResponse() - client.list_memberships(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.ListMembershipsRequest() - - assert args[0] == request_msg + unset_fields = transport.get_space_read_state._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_membership_empty_call_grpc(): +def test_get_space_read_state_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_membership), "__call__") as call: - call.return_value = membership.Membership() - client.get_membership(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.GetMembershipRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space_read_state.SpaceReadState() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "users/sample1/spaces/sample2/spaceReadState"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_message_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_message), "__call__") as call: - call.return_value = message.Message() - client.get_message(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space_read_state.SpaceReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.GetMessageRequest() + client.get_space_read_state(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=users/*/spaces/*/spaceReadState}" % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_message_empty_call_grpc(): +def test_get_space_read_state_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_message), "__call__") as call: - call.return_value = gc_message.Message() - client.update_message(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_message.UpdateMessageRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_space_read_state( + space_read_state.GetSpaceReadStateRequest(), + name="name_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_message_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_update_space_read_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_message), "__call__") as call: - call.return_value = None - client.delete_message(request=None) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.DeleteMessageRequest() + # Ensure method has been cached + assert ( + client._transport.update_space_read_state + in client._transport._wrapped_methods + ) - assert args[0] == request_msg + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_space_read_state + ] = mock_rpc + request = {} + client.update_space_read_state(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_attachment_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: - call.return_value = attachment.Attachment() - client.get_attachment(request=None) + client.update_space_read_state(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = attachment.GetAttachmentRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_update_space_read_state_rest_required_fields( + request_type=gc_space_read_state.UpdateSpaceReadStateRequest, +): + transport_class = transports.ChatServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_upload_attachment_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.upload_attachment), "__call__" - ) as call: - call.return_value = attachment.UploadAttachmentResponse() - client.upload_attachment(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = attachment.UploadAttachmentRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space_read_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space_read_state._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_spaces_empty_call_grpc(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: - call.return_value = space.ListSpacesResponse() - client.list_spaces(request=None) + # Designate an appropriate value for the returned response. + return_value = gc_space_read_state.SpaceReadState() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.ListSpacesRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = gc_space_read_state.SpaceReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_spaces_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.update_space_read_state(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: - call.return_value = space.SearchSpacesResponse() - client.search_spaces(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.SearchSpacesRequest() - assert args[0] == request_msg +def test_update_space_read_state_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_space_read_state._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spaceReadState", + "updateMask", + ) + ) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_space_empty_call_grpc(): +def test_update_space_read_state_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space), "__call__") as call: - call.return_value = space.Space() - client.get_space(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.GetSpaceRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gc_space_read_state.SpaceReadState() + # get arguments that satisfy an http rule for this method + sample_request = { + "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_space_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + space_read_state=gc_space_read_state.SpaceReadState(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_space), "__call__") as call: - call.return_value = gc_space.Space() - client.create_space(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_space_read_state.SpaceReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space.CreateSpaceRequest() + client.update_space_read_state(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{space_read_state.name=users/*/spaces/*/spaceReadState}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_up_space_empty_call_grpc(): +def test_update_space_read_state_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_up_space), "__call__") as call: - call.return_value = space.Space() - client.set_up_space(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_setup.SetUpSpaceRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_space_read_state( + gc_space_read_state.UpdateSpaceReadStateRequest(), + space_read_state=gc_space_read_state.SpaceReadState(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_space_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_get_thread_read_state_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_space), "__call__") as call: - call.return_value = gc_space.Space() - client.update_space(request=None) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space.UpdateSpaceRequest() + # Ensure method has been cached + assert ( + client._transport.get_thread_read_state + in client._transport._wrapped_methods + ) - assert args[0] == request_msg + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_thread_read_state] = ( + mock_rpc + ) + request = {} + client.get_thread_read_state(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_space_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.delete_space), "__call__") as call: - call.return_value = None - client.delete_space(request=None) + client.get_thread_read_state(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.DeleteSpaceRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_get_thread_read_state_rest_required_fields( + request_type=thread_read_state.GetThreadReadStateRequest, +): + transport_class = transports.ChatServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_complete_import_space_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.complete_import_space), "__call__" - ) as call: - call.return_value = space.CompleteImportSpaceResponse() - client.complete_import_space(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.CompleteImportSpaceRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_thread_read_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_thread_read_state._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_find_direct_message_empty_call_grpc(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.find_direct_message), "__call__" - ) as call: - call.return_value = space.Space() - client.find_direct_message(request=None) + # Designate an appropriate value for the returned response. + return_value = thread_read_state.ThreadReadState() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.FindDirectMessageRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = thread_read_state.ThreadReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_membership_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.get_thread_read_state(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_membership), "__call__" - ) as call: - call.return_value = gc_membership.Membership() - client.create_membership(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_membership.CreateMembershipRequest() - assert args[0] == request_msg +def test_get_thread_read_state_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.get_thread_read_state._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_membership_empty_call_grpc(): + +def test_get_thread_read_state_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_membership), "__call__" - ) as call: - call.return_value = gc_membership.Membership() - client.update_membership(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = thread_read_state.ThreadReadState() - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_membership.UpdateMembershipRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = thread_read_state.ThreadReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_thread_read_state(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_membership_empty_call_grpc(): + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=users/*/spaces/*/threads/*/threadReadState}" + % client.transport._host, + args[1], + ) + + +def test_get_thread_read_state_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_membership), "__call__" - ) as call: - call.return_value = membership.Membership() - client.delete_membership(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.DeleteMembershipRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_thread_read_state( + thread_read_state.GetThreadReadStateRequest(), + name="name_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_reaction_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_get_space_event_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_reaction), "__call__") as call: - call.return_value = gc_reaction.Reaction() - client.create_reaction(request=None) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_reaction.CreateReactionRequest() + # Ensure method has been cached + assert client._transport.get_space_event in client._transport._wrapped_methods - assert args[0] == request_msg + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_space_event] = mock_rpc + request = {} + client.get_space_event(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_reactions_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_reactions), "__call__") as call: - call.return_value = reaction.ListReactionsResponse() - client.list_reactions(request=None) + client.get_space_event(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.ListReactionsRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_get_space_event_rest_required_fields( + request_type=space_event.GetSpaceEventRequest, +): + transport_class = transports.ChatServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_reaction_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.delete_reaction), "__call__") as call: - call.return_value = None - client.delete_reaction(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.DeleteReactionRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_custom_emoji_empty_call_grpc(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_custom_emoji), "__call__" - ) as call: - call.return_value = reaction.CustomEmoji() - client.create_custom_emoji(request=None) + # Designate an appropriate value for the returned response. + return_value = space_event.SpaceEvent() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.CreateCustomEmojiRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = space_event.SpaceEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_custom_emoji_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.get_space_event(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_custom_emoji), "__call__") as call: - call.return_value = reaction.CustomEmoji() - client.get_custom_emoji(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.GetCustomEmojiRequest() - assert args[0] == request_msg +def test_get_space_event_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_space_event._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_custom_emojis_empty_call_grpc(): +def test_get_space_event_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_custom_emojis), "__call__" - ) as call: - call.return_value = reaction.ListCustomEmojisResponse() - client.list_custom_emojis(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.ListCustomEmojisRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space_event.SpaceEvent() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/spaceEvents/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_custom_emoji_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_custom_emoji), "__call__" - ) as call: - call.return_value = None - client.delete_custom_emoji(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space_event.SpaceEvent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.DeleteCustomEmojiRequest() + client.get_space_event(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=spaces/*/spaceEvents/*}" % client.transport._host, args[1] + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_space_read_state_empty_call_grpc(): +def test_get_space_event_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_space_read_state), "__call__" - ) as call: - call.return_value = space_read_state.SpaceReadState() - client.get_space_read_state(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_space_event( + space_event.GetSpaceEventRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_read_state.GetSpaceReadStateRequest() - assert args[0] == request_msg +def test_list_space_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_space_read_state_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.list_space_events in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_space_read_state), "__call__" - ) as call: - call.return_value = gc_space_read_state.SpaceReadState() - client.update_space_read_state(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_space_events] = ( + mock_rpc + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space_read_state.UpdateSpaceReadStateRequest() + request = {} + client.list_space_events(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_space_events(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_thread_read_state_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_thread_read_state), "__call__" - ) as call: - call.return_value = thread_read_state.ThreadReadState() - client.get_thread_read_state(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = thread_read_state.GetThreadReadStateRequest() +def test_list_space_events_rest_required_fields( + request_type=space_event.ListSpaceEventsRequest, +): + transport_class = transports.ChatServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped + assert "filter" not in jsonified_request -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_get_space_event_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_space_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space_event), "__call__") as call: - call.return_value = space_event.SpaceEvent() - client.get_space_event(request=None) + # verify required fields with default values are now present + assert "filter" in jsonified_request + assert jsonified_request["filter"] == request_init["filter"] - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_event.GetSpaceEventRequest() + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_space_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_space_events_empty_call_grpc(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_space_events), "__call__" - ) as call: - call.return_value = space_event.ListSpaceEventsResponse() - client.list_space_events(request=None) + # Designate an appropriate value for the returned response. + return_value = space_event.ListSpaceEventsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_event.ListSpaceEventsRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = space_event.ListSpaceEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_space_notification_setting_empty_call_grpc(): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_space_events(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_space_notification_setting), "__call__" - ) as call: - call.return_value = space_notification_setting.SpaceNotificationSetting() - client.get_space_notification_setting(request=None) + expected_params = [ + ( + "filter", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_notification_setting.GetSpaceNotificationSettingRequest() - assert args[0] == request_msg +def test_list_space_events_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_space_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "parent", + "filter", + ) + ) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_space_notification_setting_empty_call_grpc(): +def test_list_space_events_rest_flattened(): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_space_notification_setting), "__call__" - ) as call: - call.return_value = gc_space_notification_setting.SpaceNotificationSetting() - client.update_space_notification_setting(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space_event.ListSpaceEventsResponse() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + filter="filter_value", ) + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space_event.ListSpaceEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_space_events(**mock_args) -def test_transport_kind_grpc_asyncio(): - transport = ChatServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=spaces/*}/spaceEvents" % client.transport._host, args[1] + ) -def test_initialize_client_w_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" +def test_list_space_events_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert client is not None + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_space_events( + space_event.ListSpaceEventsRequest(), + parent="parent_value", + filter="filter_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_message_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_space_events_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_message), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_message.Message( - name="name_value", - text="text_value", - formatted_text="formatted_text_value", - fallback_text="fallback_text_value", - argument_text="argument_text_value", - thread_reply=True, - client_assigned_message_id="client_assigned_message_id_value", - ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space_event.ListSpaceEventsResponse( + space_events=[ + space_event.SpaceEvent(), + space_event.SpaceEvent(), + space_event.SpaceEvent(), + ], + next_page_token="abc", + ), + space_event.ListSpaceEventsResponse( + space_events=[], + next_page_token="def", + ), + space_event.ListSpaceEventsResponse( + space_events=[ + space_event.SpaceEvent(), + ], + next_page_token="ghi", + ), + space_event.ListSpaceEventsResponse( + space_events=[ + space_event.SpaceEvent(), + space_event.SpaceEvent(), + ], + ), ) - await client.create_message(request=None) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_message.CreateMessageRequest() + # Wrap the values into proper Response objs + response = tuple( + space_event.ListSpaceEventsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = {"parent": "spaces/sample1"} + pager = client.list_space_events(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_messages_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space_event.SpaceEvent) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_messages), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - message.ListMessagesResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_messages(request=None) + pages = list(client.list_space_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.ListMessagesRequest() - assert args[0] == request_msg +def test_get_space_notification_setting_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_memberships_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert ( + client._transport.get_space_notification_setting + in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_memberships), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - membership.ListMembershipsResponse( - next_page_token="next_page_token_value", - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.list_memberships(request=None) + client._transport._wrapped_methods[ + client._transport.get_space_notification_setting + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.ListMembershipsRequest() + request = {} + client.get_space_notification_setting(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_space_notification_setting(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_membership_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_space_notification_setting_rest_required_fields( + request_type=space_notification_setting.GetSpaceNotificationSettingRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_membership), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - membership.Membership( - name="name_value", - state=membership.Membership.MembershipState.JOINED, - role=membership.Membership.MembershipRole.ROLE_MEMBER, - ) - ) - await client.get_membership(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.GetMembershipRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space_notification_setting._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_message_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space_notification_setting._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_message), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - message.Message( - name="name_value", - text="text_value", - formatted_text="formatted_text_value", - fallback_text="fallback_text_value", - argument_text="argument_text_value", - thread_reply=True, - client_assigned_message_id="client_assigned_message_id_value", - ) - ) - await client.get_message(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.GetMessageRequest() + # Designate an appropriate value for the returned response. + return_value = space_notification_setting.SpaceNotificationSetting() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - assert args[0] == request_msg + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space_notification_setting.SpaceNotificationSetting.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_message_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_message), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_message.Message( - name="name_value", - text="text_value", - formatted_text="formatted_text_value", - fallback_text="fallback_text_value", - argument_text="argument_text_value", - thread_reply=True, - client_assigned_message_id="client_assigned_message_id_value", - ) - ) - await client.update_message(request=None) + response = client.get_space_notification_setting(request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_message.UpdateMessageRequest() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - assert args[0] == request_msg +def test_get_space_notification_setting_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_message_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport.get_space_notification_setting._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(()) & set(("name",))) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_message), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_message(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = message.DeleteMessageRequest() +def test_get_space_notification_setting_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = space_notification_setting.SpaceNotificationSetting() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "users/sample1/spaces/sample2/spaceNotificationSetting" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_attachment_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - attachment.Attachment( - name="name_value", - content_name="content_name_value", - content_type="content_type_value", - thumbnail_uri="thumbnail_uri_value", - download_uri="download_uri_value", - source=attachment.Attachment.Source.DRIVE_FILE, - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space_notification_setting.SpaceNotificationSetting.pb( + return_value ) - await client.get_attachment(request=None) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = attachment.GetAttachmentRequest() + client.get_space_notification_setting(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=users/*/spaces/*/spaceNotificationSetting}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_upload_attachment_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_space_notification_setting_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.upload_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - attachment.UploadAttachmentResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_space_notification_setting( + space_notification_setting.GetSpaceNotificationSettingRequest(), + name="name_value", ) - await client.upload_attachment(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = attachment.UploadAttachmentRequest() - - assert args[0] == request_msg +def test_update_space_notification_setting_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_spaces_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.ListSpacesResponse( - next_page_token="next_page_token_value", - ) + # Ensure method has been cached + assert ( + client._transport.update_space_notification_setting + in client._transport._wrapped_methods ) - await client.list_spaces(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.ListSpacesRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_space_notification_setting + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.update_space_notification_setting(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_spaces_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.update_space_notification_setting(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.SearchSpacesResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) - ) - await client.search_spaces(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.SearchSpacesRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_update_space_notification_setting_rest_required_fields( + request_type=gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, +): + transport_class = transports.ChatServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.Space( - name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - ) - ) - await client.get_space(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.GetSpaceRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space_notification_setting._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space_notification_setting._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # verify required fields with non-default values are left alone + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_space.Space( - name="name_value", - type_=gc_space.Space.Type.ROOM, - space_type=gc_space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", + # Designate an appropriate value for the returned response. + return_value = gc_space_notification_setting.SpaceNotificationSetting() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( + return_value ) - ) - await client.create_space(request=None) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space.CreateSpaceRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + response = client.update_space_notification_setting(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_up_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_update_space_notification_setting_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.set_up_space), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.Space( - name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", + unset_fields = ( + transport.update_space_notification_setting._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spaceNotificationSetting", + "updateMask", ) ) - await client.set_up_space(request=None) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_setup.SetUpSpaceRequest() - assert args[0] == request_msg +def test_update_space_notification_setting_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gc_space_notification_setting.SpaceNotificationSetting() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "space_notification_setting": { + "name": "users/sample1/spaces/sample2/spaceNotificationSetting" + } + } - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.update_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_space.Space( - name="name_value", - type_=gc_space.Space.Type.ROOM, - space_type=gc_space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - ) + # get truthy value for each flattened field + mock_args = dict( + space_notification_setting=gc_space_notification_setting.SpaceNotificationSetting( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - await client.update_space(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space.UpdateSpaceRequest() + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_space_notification_setting(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{space_notification_setting.name=users/*/spaces/*/spaceNotificationSetting}" + % client.transport._host, + args[1], + ) + + +def test_update_space_notification_setting_rest_flattened_error( + transport: str = "rest", +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_space(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_space_notification_setting( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest(), + space_notification_setting=gc_space_notification_setting.SpaceNotificationSetting( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.DeleteSpaceRequest() - assert args[0] == request_msg +def test_create_section_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_complete_import_space_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.create_section in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.complete_import_space), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.CompleteImportSpaceResponse() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.complete_import_space(request=None) + client._transport._wrapped_methods[client._transport.create_section] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.CompleteImportSpaceRequest() + request = {} + client.create_section(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + client.create_section(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_find_direct_message_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.find_direct_message), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space.Space( - name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - ) - ) - await client.find_direct_message(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space.FindDirectMessageRequest() +def test_create_section_rest_required_fields( + request_type=gc_section.CreateSectionRequest, +): + transport_class = transports.ChatServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_membership_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_membership), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_membership.Membership( - name="name_value", - state=gc_membership.Membership.MembershipState.JOINED, - role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, - ) - ) - await client.create_membership(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_membership.CreateMembershipRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_membership_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_membership), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_membership.Membership( - name="name_value", - state=gc_membership.Membership.MembershipState.JOINED, - role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, - ) - ) - await client.update_membership(request=None) + # Designate an appropriate value for the returned response. + return_value = gc_section.Section() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_membership.UpdateMembershipRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = gc_section.Section.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_membership_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + response = client.create_section(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_section_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_membership), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - membership.Membership( - name="name_value", - state=membership.Membership.MembershipState.JOINED, - role=membership.Membership.MembershipRole.ROLE_MEMBER, + unset_fields = transport.create_section._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "section", ) ) - await client.delete_membership(request=None) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = membership.DeleteMembershipRequest() - assert args[0] == request_msg +def test_create_section_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gc_section.Section() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_reaction_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "users/sample1"} - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_reaction), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_reaction.Reaction( - name="name_value", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + section=gc_section.Section(name="name_value"), ) - await client.create_reaction(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_reaction.CreateReactionRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_section.Section.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.create_section(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=users/*}/sections" % client.transport._host, args[1] + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_reactions_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_section_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_reactions), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reaction.ListReactionsResponse( - next_page_token="next_page_token_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_section( + gc_section.CreateSectionRequest(), + parent="parent_value", + section=gc_section.Section(name="name_value"), ) - await client.list_reactions(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.ListReactionsRequest() - assert args[0] == request_msg +def test_delete_section_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_reaction_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.delete_section in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_reaction), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_reaction(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_section] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.DeleteReactionRequest() + request = {} + client.delete_section(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.delete_section(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_custom_emoji_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_custom_emoji), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reaction.CustomEmoji( - name="name_value", - uid="uid_value", - emoji_name="emoji_name_value", - temporary_image_uri="temporary_image_uri_value", - ) - ) - await client.create_custom_emoji(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.CreateCustomEmojiRequest() +def test_delete_section_rest_required_fields(request_type=section.DeleteSectionRequest): + transport_class = transports.ChatServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_custom_emoji_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_custom_emoji), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reaction.CustomEmoji( - name="name_value", - uid="uid_value", - emoji_name="emoji_name_value", - temporary_image_uri="temporary_image_uri_value", - ) - ) - await client.get_custom_emoji(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.GetCustomEmojiRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_custom_emojis_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_custom_emojis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reaction.ListCustomEmojisResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_custom_emojis(request=None) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.ListCustomEmojisRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_section(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_custom_emoji_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_section_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_custom_emoji), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_custom_emoji(request=None) + unset_fields = transport.delete_section._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reaction.DeleteCustomEmojiRequest() - assert args[0] == request_msg +def test_delete_section_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_space_read_state_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"name": "users/sample1/sections/sample2"} - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_space_read_state), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space_read_state.SpaceReadState( - name="name_value", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.get_space_read_state(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_read_state.GetSpaceReadStateRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.delete_section(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=users/*/sections/*}" % client.transport._host, args[1] + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_space_read_state_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_delete_section_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_space_read_state), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_space_read_state.SpaceReadState( - name="name_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_section( + section.DeleteSectionRequest(), + name="name_value", ) - await client.update_space_read_state(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gc_space_read_state.UpdateSpaceReadStateRequest() - assert args[0] == request_msg +def test_update_section_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_thread_read_state_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.update_section in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_thread_read_state), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - thread_read_state.ThreadReadState( - name="name_value", - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.get_thread_read_state(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = thread_read_state.GetThreadReadStateRequest() - - assert args[0] == request_msg + client._transport._wrapped_methods[client._transport.update_section] = mock_rpc + request = {} + client.update_section(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_space_event_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space_event), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space_event.SpaceEvent( - name="name_value", - event_type="event_type_value", - ) - ) - await client.get_space_event(request=None) + client.update_section(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_event.GetSpaceEventRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_update_section_rest_required_fields( + request_type=gc_section.UpdateSectionRequest, +): + transport_class = transports.ChatServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_space_events_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_space_events), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space_event.ListSpaceEventsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_space_events(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_event.ListSpaceEventsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_section._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_space_notification_setting_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # verify required fields with non-default values are left alone + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_space_notification_setting), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - space_notification_setting.SpaceNotificationSetting( - name="name_value", - notification_setting=space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, - mute_setting=space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, - ) - ) - await client.get_space_notification_setting(request=None) + # Designate an appropriate value for the returned response. + return_value = gc_section.Section() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = space_notification_setting.GetSpaceNotificationSettingRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = gc_section.Section.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_space_notification_setting_empty_call_grpc_asyncio(): - client = ChatServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + response = client.update_section(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_section_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_space_notification_setting), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gc_space_notification_setting.SpaceNotificationSetting( - name="name_value", - notification_setting=gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, - mute_setting=gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, + unset_fields = transport.update_section._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "section", + "updateMask", ) ) - await client.update_space_notification_setting(request=None) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + +def test_update_section_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gc_section.Section() + + # get arguments that satisfy an http rule for this method + sample_request = {"section": {"name": "users/sample1/sections/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gc_section.Section.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_section(**mock_args) -def test_transport_kind_rest(): - transport = ChatServiceClient.get_transport_class("rest")( + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{section.name=users/*/sections/*}" % client.transport._host, args[1] + ) + + +def test_update_section_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_section( + gc_section.UpdateSectionRequest(), + section=gc_section.Section(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_sections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_sections] = mock_rpc + + request = {} + client.list_sections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_sections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sections_rest_required_fields(request_type=section.ListSectionsRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_sections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() + ).list_sections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) ) - assert transport.kind == "rest" + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_create_message_rest_bad_request(request_type=gc_message.CreateMessageRequest): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): + # Designate an appropriate value for the returned response. + return_value = section.ListSectionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = section.ListSectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sections(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_sections_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_sections._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_sections_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # 
Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = section.ListSectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "users/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = section.ListSectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_message(request) + client.list_sections(**mock_args) -@pytest.mark.parametrize( - "request_type", - [ - gc_message.CreateMessageRequest, - dict, - ], -) -def test_create_message_rest_call_success(request_type): + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=users/*}/sections" % client.transport._host, args[1] + ) + + +def test_list_sections_rest_flattened_error(transport: str = "rest"): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} - request_init["message"] = { - "name": "name_value", - "sender": { - "name": "name_value", - "display_name": "display_name_value", - "domain_id": "domain_id_value", - "type_": 1, - "is_anonymous": True, - }, - "create_time": {"seconds": 751, "nanos": 543}, - "last_update_time": {}, - "delete_time": {}, - "text": "text_value", - "formatted_text": "formatted_text_value", - "cards": [ - { - "header": { - "title": "title_value", - "subtitle": "subtitle_value", - "image_style": 1, - "image_url": "image_url_value", - }, - "sections": [ - { - "header": "header_value", - "widgets": [ - { - "text_paragraph": {"text": "text_value"}, - "image": { - "image_url": "image_url_value", - "on_click": { - "action": { - "action_method_name": "action_method_name_value", - "parameters": [ - { - "key": "key_value", - "value": "value_value", - } - ], - }, - "open_link": {"url": "url_value"}, - }, - "aspect_ratio": 0.1278, - }, - "key_value": { - "icon": 1, - "icon_url": "icon_url_value", - "top_label": "top_label_value", - "content": "content_value", - "content_multiline": True, - "bottom_label": "bottom_label_value", - "on_click": {}, - "button": { - "text_button": { - "text": "text_value", - "on_click": {}, - }, - "image_button": { - "icon": 1, - "icon_url": "icon_url_value", - "on_click": {}, - "name": "name_value", - }, - }, - }, - "buttons": {}, - } - ], - } + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_sections( + section.ListSectionsRequest(), + parent="parent_value", + ) + + +def test_list_sections_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + section.Section(), ], - "card_actions": [ - {"action_label": "action_label_value", "on_click": {}} + next_page_token="abc", + ), + section.ListSectionsResponse( + sections=[], + next_page_token="def", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), ], - "name": "name_value", + next_page_token="ghi", + ), + section.ListSectionsResponse( + sections=[ + section.Section(), + section.Section(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(section.ListSectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "users/sample1"} + + pager = client.list_sections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, section.Section) for i in results) + + pages = list(client.list_sections(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def 
test_position_section_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.position_section in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.position_section] = ( + mock_rpc + ) + + request = {} + client.position_section(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.position_section(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_position_section_rest_required_fields( + request_type=section.PositionSectionRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).position_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).position_section._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = section.PositionSectionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - ], - "cards_v2": [ - { - "card_id": "card_id_value", - "card": { - "header": { - "title": "title_value", - "subtitle": "subtitle_value", - "image_type": 1, - "image_url": "image_url_value", - "image_alt_text": "image_alt_text_value", - }, - "sections": [ - { - "header": "header_value", - "widgets": [ - { - "text_paragraph": { - "text": "text_value", - "max_lines": 960, - "text_syntax": 1, - }, - "image": { - "image_url": "image_url_value", - "on_click": { - "action": { - "function": "function_value", - "parameters": [ - { - "key": "key_value", - "value": "value_value", - } - ], - "load_indicator": 1, - "persist_values": True, - "interaction": 1, - "required_widgets": [ - "required_widgets_value1", - "required_widgets_value2", - ], - "all_widgets_are_required": True, - }, - "open_link": { - "url": "url_value", - "open_as": 1, - "on_close": 1, - }, - "open_dynamic_link_action": {}, - "card": {}, - "overflow_menu": { - "items": [ - { - "start_icon": { - "known_icon": "known_icon_value", - "icon_url": "icon_url_value", - "material_icon": { - "name": "name_value", - "fill": True, - "weight": 648, + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = section.PositionSectionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.position_section(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def 
test_position_section_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.position_section._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_list_section_items_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_section_items in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_section_items] = ( + mock_rpc + ) + + request = {} + client.list_section_items(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_section_items(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_section_items_rest_required_fields( + request_type=section.ListSectionItemsRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_section_items._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_section_items._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = section.ListSectionItemsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = section.ListSectionItemsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_section_items(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_section_items_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_section_items._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_section_items_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = section.ListSectionItemsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "users/sample1/sections/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = section.ListSectionItemsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_section_items(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=users/*/sections/*}/items" % client.transport._host, args[1] + ) + + +def test_list_section_items_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_section_items( + section.ListSectionItemsRequest(), + parent="parent_value", + ) + + +def test_list_section_items_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + section.SectionItem(), + ], + next_page_token="abc", + ), + section.ListSectionItemsResponse( + section_items=[], + next_page_token="def", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + ], + next_page_token="ghi", + ), + section.ListSectionItemsResponse( + section_items=[ + section.SectionItem(), + section.SectionItem(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(section.ListSectionItemsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "users/sample1/sections/sample2"} + + pager = client.list_section_items(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, section.SectionItem) for i in results) + + pages = list(client.list_section_items(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_move_section_item_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.move_section_item in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_section_item] = ( + mock_rpc + ) + + request = {} + client.move_section_item(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.move_section_item(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_move_section_item_rest_required_fields( + request_type=section.MoveSectionItemRequest, +): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["target_section"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_section_item._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["targetSection"] = "target_section_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_section_item._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "targetSection" in jsonified_request + assert jsonified_request["targetSection"] == 
"target_section_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = section.MoveSectionItemResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = section.MoveSectionItemResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.move_section_item(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_move_section_item_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.move_section_item._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "targetSection", + ) + ) + ) + + +def 
test_move_section_item_rest_flattened(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = section.MoveSectionItemResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "users/sample1/sections/sample2/items/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + target_section="target_section_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = section.MoveSectionItemResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.move_section_item(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=users/*/sections/*/items/*}:move" % client.transport._host, + args[1], + ) + + +def test_move_section_item_rest_flattened_error(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_section_item( + section.MoveSectionItemRequest(), + name="name_value", + target_section="target_section_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ChatServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ChatServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ChatServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ChatServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ChatServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ChatServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ChatServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ChatServiceGrpcTransport, + transports.ChatServiceGrpcAsyncIOTransport, + transports.ChatServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ChatServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_message_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.create_message), "__call__") as call: + call.return_value = gc_message.Message() + client.create_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_message.CreateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_messages_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_messages), "__call__") as call: + call.return_value = message.ListMessagesResponse() + client.list_messages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.ListMessagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_memberships_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_memberships), "__call__") as call: + call.return_value = membership.ListMembershipsResponse() + client.list_memberships(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.ListMembershipsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_membership_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_membership), "__call__") as call: + call.return_value = membership.Membership() + client.get_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.GetMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_message_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_message), "__call__") as call: + call.return_value = message.Message() + client.get_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.GetMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_message_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_message), "__call__") as call: + call.return_value = gc_message.Message() + client.update_message(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_message.UpdateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_message_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_message), "__call__") as call: + call.return_value = None + client.delete_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.DeleteMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_attachment_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + call.return_value = attachment.Attachment() + client.get_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.GetAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upload_attachment_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.upload_attachment), "__call__" + ) as call: + call.return_value = attachment.UploadAttachmentResponse() + client.upload_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.UploadAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_spaces_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + call.return_value = space.ListSpacesResponse() + client.list_spaces(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.ListSpacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_spaces_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value = space.SearchSpacesResponse() + client.search_spaces(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.SearchSpacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + call.return_value = space.Space() + client.get_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.GetSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + call.return_value = gc_space.Space() + client.create_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space.CreateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_up_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_up_space), "__call__") as call: + call.return_value = space.Space() + client.set_up_space(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_setup.SetUpSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + call.return_value = gc_space.Space() + client.update_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space.UpdateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_space), "__call__") as call: + call.return_value = None + client.delete_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.DeleteSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_complete_import_space_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.complete_import_space), "__call__" + ) as call: + call.return_value = space.CompleteImportSpaceResponse() + client.complete_import_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.CompleteImportSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_find_direct_message_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.find_direct_message), "__call__" + ) as call: + call.return_value = space.Space() + client.find_direct_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.FindDirectMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_membership_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_membership), "__call__" + ) as call: + call.return_value = gc_membership.Membership() + client.create_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_membership.CreateMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_membership_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_membership), "__call__" + ) as call: + call.return_value = gc_membership.Membership() + client.update_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_membership.UpdateMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_membership_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_membership), "__call__" + ) as call: + call.return_value = membership.Membership() + client.delete_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.DeleteMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_reaction_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_reaction), "__call__") as call: + call.return_value = gc_reaction.Reaction() + client.create_reaction(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_reaction.CreateReactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_reactions_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_reactions), "__call__") as call: + call.return_value = reaction.ListReactionsResponse() + client.list_reactions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.ListReactionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_reaction_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_reaction), "__call__") as call: + call.return_value = None + client.delete_reaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.DeleteReactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_custom_emoji_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_custom_emoji), "__call__" + ) as call: + call.return_value = reaction.CustomEmoji() + client.create_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.CreateCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_custom_emoji_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_custom_emoji), "__call__") as call: + call.return_value = reaction.CustomEmoji() + client.get_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.GetCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_custom_emojis_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_emojis), "__call__" + ) as call: + call.return_value = reaction.ListCustomEmojisResponse() + client.list_custom_emojis(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.ListCustomEmojisRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_custom_emoji_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_custom_emoji), "__call__" + ) as call: + call.return_value = None + client.delete_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.DeleteCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_space_read_state_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_space_read_state), "__call__" + ) as call: + call.return_value = space_read_state.SpaceReadState() + client.get_space_read_state(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_read_state.GetSpaceReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_space_read_state_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_space_read_state), "__call__" + ) as call: + call.return_value = gc_space_read_state.SpaceReadState() + client.update_space_read_state(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space_read_state.UpdateSpaceReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_thread_read_state_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_thread_read_state), "__call__" + ) as call: + call.return_value = thread_read_state.ThreadReadState() + client.get_thread_read_state(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = thread_read_state.GetThreadReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_space_event_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space_event), "__call__") as call: + call.return_value = space_event.SpaceEvent() + client.get_space_event(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_event.GetSpaceEventRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_space_events_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_space_events), "__call__" + ) as call: + call.return_value = space_event.ListSpaceEventsResponse() + client.list_space_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_event.ListSpaceEventsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_space_notification_setting_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_space_notification_setting), "__call__" + ) as call: + call.return_value = space_notification_setting.SpaceNotificationSetting() + client.get_space_notification_setting(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_notification_setting.GetSpaceNotificationSettingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_space_notification_setting_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_space_notification_setting), "__call__" + ) as call: + call.return_value = gc_space_notification_setting.SpaceNotificationSetting() + client.update_space_notification_setting(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_section_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + call.return_value = gc_section.Section() + client.create_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.CreateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_section_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + call.return_value = None + client.delete_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.DeleteSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_section_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_section), "__call__") as call: + call.return_value = gc_section.Section() + client.update_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.UpdateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sections_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + call.return_value = section.ListSectionsResponse() + client.list_sections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_position_section_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + call.return_value = section.PositionSectionResponse() + client.position_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.PositionSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_section_items_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + call.return_value = section.ListSectionItemsResponse() + client.list_section_items(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionItemsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_section_item_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + call.return_value = section.MoveSectionItemResponse() + client.move_section_item(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.MoveSectionItemRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ChatServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_message_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_message.Message( + name="name_value", + text="text_value", + formatted_text="formatted_text_value", + fallback_text="fallback_text_value", + argument_text="argument_text_value", + thread_reply=True, + client_assigned_message_id="client_assigned_message_id_value", + ) + ) + await client.create_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_message.CreateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_messages_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_messages), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + message.ListMessagesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_messages(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.ListMessagesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_memberships_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_memberships), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + membership.ListMembershipsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_memberships(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.ListMembershipsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_membership_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_membership), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + membership.Membership( + name="name_value", + state=membership.Membership.MembershipState.JOINED, + role=membership.Membership.MembershipRole.ROLE_MEMBER, + ) + ) + await client.get_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.GetMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_message_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + message.Message( + name="name_value", + text="text_value", + formatted_text="formatted_text_value", + fallback_text="fallback_text_value", + argument_text="argument_text_value", + thread_reply=True, + client_assigned_message_id="client_assigned_message_id_value", + ) + ) + await client.get_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.GetMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_message_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_message.Message( + name="name_value", + text="text_value", + formatted_text="formatted_text_value", + fallback_text="fallback_text_value", + argument_text="argument_text_value", + thread_reply=True, + client_assigned_message_id="client_assigned_message_id_value", + ) + ) + await client.update_message(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_message.UpdateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_message_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = message.DeleteMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_attachment_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_attachment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.Attachment( + name="name_value", + content_name="content_name_value", + content_type="content_type_value", + thumbnail_uri="thumbnail_uri_value", + download_uri="download_uri_value", + source=attachment.Attachment.Source.DRIVE_FILE, + ) + ) + await client.get_attachment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.GetAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upload_attachment_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upload_attachment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + attachment.UploadAttachmentResponse() + ) + await client.upload_attachment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = attachment.UploadAttachmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_spaces_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_spaces(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.ListSpacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_spaces_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + await client.search_spaces(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.SearchSpacesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.Space( + name="name_value", + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + ) + ) + await client.get_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.GetSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_space.Space( + name="name_value", + type_=gc_space.Space.Type.ROOM, + space_type=gc_space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + ) + ) + await client.create_space(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space.CreateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_up_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_up_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.Space( + name="name_value", + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + ) + ) + await client.set_up_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_setup.SetUpSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_space.Space( + name="name_value", + type_=gc_space.Space.Type.ROOM, + space_type=gc_space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + ) + ) + await client.update_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space.UpdateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.DeleteSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_complete_import_space_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.complete_import_space), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.CompleteImportSpaceResponse() + ) + await client.complete_import_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.CompleteImportSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_find_direct_message_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.find_direct_message), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.Space( + name="name_value", + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + ) + ) + await client.find_direct_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.FindDirectMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_membership_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_membership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_membership.Membership( + name="name_value", + state=gc_membership.Membership.MembershipState.JOINED, + role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, + ) + ) + await client.create_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_membership.CreateMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_membership_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_membership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_membership.Membership( + name="name_value", + state=gc_membership.Membership.MembershipState.JOINED, + role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, + ) + ) + await client.update_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_membership.UpdateMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_membership_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_membership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + membership.Membership( + name="name_value", + state=membership.Membership.MembershipState.JOINED, + role=membership.Membership.MembershipRole.ROLE_MEMBER, + ) + ) + await client.delete_membership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = membership.DeleteMembershipRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_reaction_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_reaction), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_reaction.Reaction( + name="name_value", + ) + ) + await client.create_reaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_reaction.CreateReactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_reactions_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_reactions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reaction.ListReactionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_reactions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.ListReactionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_reaction_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_reaction), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_reaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.DeleteReactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_custom_emoji_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_custom_emoji), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reaction.CustomEmoji( + name="name_value", + uid="uid_value", + emoji_name="emoji_name_value", + temporary_image_uri="temporary_image_uri_value", + ) + ) + await client.create_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.CreateCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_custom_emoji_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_custom_emoji), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reaction.CustomEmoji( + name="name_value", + uid="uid_value", + emoji_name="emoji_name_value", + temporary_image_uri="temporary_image_uri_value", + ) + ) + await client.get_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.GetCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_custom_emojis_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_custom_emojis), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reaction.ListCustomEmojisResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_custom_emojis(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.ListCustomEmojisRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_custom_emoji_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_custom_emoji), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_custom_emoji(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reaction.DeleteCustomEmojiRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_space_read_state_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_space_read_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space_read_state.SpaceReadState( + name="name_value", + ) + ) + await client.get_space_read_state(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_read_state.GetSpaceReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_space_read_state_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_space_read_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_space_read_state.SpaceReadState( + name="name_value", + ) + ) + await client.update_space_read_state(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_space_read_state.UpdateSpaceReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_thread_read_state_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_thread_read_state), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + thread_read_state.ThreadReadState( + name="name_value", + ) + ) + await client.get_thread_read_state(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = thread_read_state.GetThreadReadStateRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_space_event_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space_event), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space_event.SpaceEvent( + name="name_value", + event_type="event_type_value", + ) + ) + await client.get_space_event(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_event.GetSpaceEventRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_space_events_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_space_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space_event.ListSpaceEventsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_space_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_event.ListSpaceEventsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_space_notification_setting_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_space_notification_setting), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space_notification_setting.SpaceNotificationSetting( + name="name_value", + notification_setting=space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, + mute_setting=space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, + ) + ) + await client.get_space_notification_setting(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space_notification_setting.GetSpaceNotificationSettingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_space_notification_setting_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_space_notification_setting), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_space_notification_setting.SpaceNotificationSetting( + name="name_value", + notification_setting=gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, + mute_setting=gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, + ) + ) + await client.update_space_notification_setting(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_section_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, + ) + ) + await client.create_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.CreateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_section_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.DeleteSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_section_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gc_section.Section( + name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, + ) + ) + await client.update_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.UpdateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_sections_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_sections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_position_section_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.PositionSectionResponse() + ) + await client.position_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.PositionSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_section_items_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.ListSectionItemsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_section_items(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionItemsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_move_section_item_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + section.MoveSectionItemResponse() + ) + await client.move_section_item(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.MoveSectionItemRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ChatServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_message_rest_bad_request(request_type=gc_message.CreateMessageRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_message(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gc_message.CreateMessageRequest, + dict, + ], +) +def test_create_message_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request_init["message"] = { + "name": "name_value", + "sender": { + "name": "name_value", + "display_name": "display_name_value", + "domain_id": "domain_id_value", + "type_": 1, + "is_anonymous": True, + }, + "create_time": {"seconds": 751, "nanos": 543}, + "last_update_time": {}, + "delete_time": {}, + "text": "text_value", + "formatted_text": "formatted_text_value", + "cards": [ + { + "header": { + "title": 
"title_value", + "subtitle": "subtitle_value", + "image_style": 1, + "image_url": "image_url_value", + }, + "sections": [ + { + "header": "header_value", + "widgets": [ + { + "text_paragraph": {"text": "text_value"}, + "image": { + "image_url": "image_url_value", + "on_click": { + "action": { + "action_method_name": "action_method_name_value", + "parameters": [ + { + "key": "key_value", + "value": "value_value", + } + ], + }, + "open_link": {"url": "url_value"}, + }, + "aspect_ratio": 0.1278, + }, + "key_value": { + "icon": 1, + "icon_url": "icon_url_value", + "top_label": "top_label_value", + "content": "content_value", + "content_multiline": True, + "bottom_label": "bottom_label_value", + "on_click": {}, + "button": { + "text_button": { + "text": "text_value", + "on_click": {}, + }, + "image_button": { + "icon": 1, + "icon_url": "icon_url_value", + "on_click": {}, + "name": "name_value", + }, + }, + }, + "buttons": {}, + } + ], + } + ], + "card_actions": [ + {"action_label": "action_label_value", "on_click": {}} + ], + "name": "name_value", + } + ], + "cards_v2": [ + { + "card_id": "card_id_value", + "card": { + "header": { + "title": "title_value", + "subtitle": "subtitle_value", + "image_type": 1, + "image_url": "image_url_value", + "image_alt_text": "image_alt_text_value", + }, + "sections": [ + { + "header": "header_value", + "widgets": [ + { + "text_paragraph": { + "text": "text_value", + "max_lines": 960, + "text_syntax": 1, + }, + "image": { + "image_url": "image_url_value", + "on_click": { + "action": { + "function": "function_value", + "parameters": [ + { + "key": "key_value", + "value": "value_value", + } + ], + "load_indicator": 1, + "persist_values": True, + "interaction": 1, + "required_widgets": [ + "required_widgets_value1", + "required_widgets_value2", + ], + "all_widgets_are_required": True, + }, + "open_link": { + "url": "url_value", + "open_as": 1, + "on_close": 1, + }, + "open_dynamic_link_action": {}, + "card": {}, + "overflow_menu": { + 
"items": [ + { + "start_icon": { + "known_icon": "known_icon_value", + "icon_url": "icon_url_value", + "material_icon": { + "name": "name_value", + "fill": True, + "weight": 648, "grade": 515, }, "alt_text": "alt_text_value", @@ -23832,82 +28182,860 @@ def test_update_message_rest_call_success(request_type): # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Determine if the message type is proto-plus or protobuf - test_field = gc_message.UpdateMessageRequest.meta.fields["message"] + # Determine if the message type is proto-plus or protobuf + test_field = gc_message.UpdateMessageRequest.meta.fields["message"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["message"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the 
type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["message"][field])): + del request_init["message"][field][i][subfield] + else: + del request_init["message"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gc_message.Message( + name="name_value", + text="text_value", + formatted_text="formatted_text_value", + fallback_text="fallback_text_value", + argument_text="argument_text_value", + thread_reply=True, + client_assigned_message_id="client_assigned_message_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gc_message.Message.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_message(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gc_message.Message) + assert response.name == "name_value" + assert response.text == "text_value" + assert response.formatted_text == "formatted_text_value" + assert response.fallback_text == "fallback_text_value" + assert response.argument_text == "argument_text_value" + assert response.thread_reply is True + assert response.client_assigned_message_id == "client_assigned_message_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_message_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_update_message" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_update_message_with_metadata" + ) as 
post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_update_message" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gc_message.UpdateMessageRequest.pb( + gc_message.UpdateMessageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gc_message.Message.to_json(gc_message.Message()) + req.return_value.content = return_value + + request = gc_message.UpdateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gc_message.Message() + post_with_metadata.return_value = gc_message.Message(), metadata + + client.update_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_message_rest_bad_request(request_type=message.DeleteMessageRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/messages/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_message(request) + + +@pytest.mark.parametrize( + "request_type", + [ + message.DeleteMessageRequest, + dict, + ], +) +def test_delete_message_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/messages/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_message(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_message_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_delete_message" + ) as pre, + ): + pre.assert_not_called() + pb_message = message.DeleteMessageRequest.pb(message.DeleteMessageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = message.DeleteMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_attachment_rest_bad_request(request_type=attachment.GetAttachmentRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + attachment.GetAttachmentRequest, + dict, + ], +) +def test_get_attachment_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = attachment.Attachment( + name="name_value", + content_name="content_name_value", + content_type="content_type_value", + thumbnail_uri="thumbnail_uri_value", + download_uri="download_uri_value", + source=attachment.Attachment.Source.DRIVE_FILE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = attachment.Attachment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_attachment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, attachment.Attachment) + assert response.name == "name_value" + assert response.content_name == "content_name_value" + assert response.content_type == "content_type_value" + assert response.thumbnail_uri == "thumbnail_uri_value" + assert response.download_uri == "download_uri_value" + assert response.source == attachment.Attachment.Source.DRIVE_FILE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_attachment" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_attachment_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_get_attachment" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = attachment.GetAttachmentRequest.pb( + attachment.GetAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = attachment.Attachment.to_json(attachment.Attachment()) + req.return_value.content = return_value + + request = attachment.GetAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.Attachment() + 
post_with_metadata.return_value = attachment.Attachment(), metadata + + client.get_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_upload_attachment_rest_bad_request( + request_type=attachment.UploadAttachmentRequest, +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.upload_attachment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + attachment.UploadAttachmentRequest, + dict, + ], +) +def test_upload_attachment_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = attachment.UploadAttachmentResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = attachment.UploadAttachmentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.upload_attachment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, attachment.UploadAttachmentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upload_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_upload_attachment_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_upload_attachment" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = attachment.UploadAttachmentRequest.pb( + attachment.UploadAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} + return_value = attachment.UploadAttachmentResponse.to_json( + attachment.UploadAttachmentResponse() + ) + req.return_value.content = return_value + + request = attachment.UploadAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.UploadAttachmentResponse() + post_with_metadata.return_value = ( + attachment.UploadAttachmentResponse(), + metadata, + ) + + client.upload_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_spaces_rest_bad_request(request_type=space.ListSpacesRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_spaces(request) + + +@pytest.mark.parametrize( + "request_type", + [ + space.ListSpacesRequest, + dict, + ], +) +def test_list_spaces_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.ListSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_spaces(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSpacesPager) + assert response.next_page_token == "next_page_token_value" - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_spaces_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_spaces" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_spaces_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_list_spaces" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = space.ListSpacesResponse.to_json(space.ListSpacesResponse()) + req.return_value.content = return_value - subfields_not_in_runtime = [] + request = 
space.ListSpacesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = space.ListSpacesResponse() + post_with_metadata.return_value = space.ListSpacesResponse(), metadata - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["message"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.list_spaces( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_search_spaces_rest_bad_request(request_type=space.SearchSpacesRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_spaces(request) + + +@pytest.mark.parametrize( + "request_type", + [ + space.SearchSpacesRequest, + dict, + ], +) +def test_search_spaces_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.SearchSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_spaces(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchSpacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_spaces_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_search_spaces" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_search_spaces_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_search_spaces" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = space.SearchSpacesResponse.to_json(space.SearchSpacesResponse()) + req.return_value.content = return_value + + request = space.SearchSpacesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = space.SearchSpacesResponse() + post_with_metadata.return_value = space.SearchSpacesResponse(), metadata + + client.search_spaces( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_get_space_rest_bad_request(request_type=space.GetSpaceRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_space(request) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["message"][field])): - del request_init["message"][field][i][subfield] - else: - del request_init["message"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + space.GetSpaceRequest, + dict, + ], +) +def test_get_space_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_message.Message( + return_value = space.Space( name="name_value", - text="text_value", - formatted_text="formatted_text_value", - fallback_text="fallback_text_value", - argument_text="argument_text_value", - thread_reply=True, - client_assigned_message_id="client_assigned_message_id_value", + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -23915,26 +29043,35 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_message.Message.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_message(request) + response = client.get_space(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_message.Message) + assert isinstance(response, space.Space) assert response.name == "name_value" - assert response.text == "text_value" - assert response.formatted_text == "formatted_text_value" - assert response.fallback_text == "fallback_text_value" - assert response.argument_text == "argument_text_value" - assert response.thread_reply is True - assert response.client_assigned_message_id == "client_assigned_message_id_value" + assert response.type_ == space.Space.Type.ROOM + assert response.space_type == space.Space.SpaceType.SPACE + assert response.single_user_bot_dm is True + assert response.threaded is True + assert response.display_name == "display_name_value" + assert response.external_user_allowed is True + assert ( + response.space_threading_state + == space.Space.SpaceThreadingState.THREADED_MESSAGES + ) + assert response.space_history_state == history_state.HistoryState.HISTORY_OFF + assert response.import_mode is True + assert response.admin_installed is True + assert response.customer == "customer_value" + assert response.space_uri == "space_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_message_rest_interceptors(null_interceptor): +def test_get_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -23947,21 +29084,19 @@ def test_update_message_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_update_message" + transports.ChatServiceRestInterceptor, "post_get_space" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_update_message_with_metadata" + transports.ChatServiceRestInterceptor, "post_get_space_with_metadata" ) as post_with_metadata, mock.patch.object( - 
transports.ChatServiceRestInterceptor, "pre_update_message" + transports.ChatServiceRestInterceptor, "pre_get_space" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gc_message.UpdateMessageRequest.pb( - gc_message.UpdateMessageRequest() - ) + pb_message = space.GetSpaceRequest.pb(space.GetSpaceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23972,19 +29107,19 @@ def test_update_message_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_message.Message.to_json(gc_message.Message()) + return_value = space.Space.to_json(space.Space()) req.return_value.content = return_value - request = gc_message.UpdateMessageRequest() + request = space.GetSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_message.Message() - post_with_metadata.return_value = gc_message.Message(), metadata + post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata - client.update_message( + client.get_space( request, metadata=[ ("key", "val"), @@ -23997,12 +29132,12 @@ def test_update_message_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_message_rest_bad_request(request_type=message.DeleteMessageRequest): +def test_create_space_rest_bad_request(request_type=gc_space.CreateSpaceRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24018,45 +29153,188 @@ def test_delete_message_rest_bad_request(request_type=message.DeleteMessageReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_message(request) + client.create_space(request) @pytest.mark.parametrize( "request_type", [ - message.DeleteMessageRequest, + gc_space.CreateSpaceRequest, dict, ], ) -def test_delete_message_rest_call_success(request_type): +def test_create_space_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2"} + request_init = {} + request_init["space"] = { + "name": "name_value", + "type_": 1, + "space_type": 1, + "single_user_bot_dm": True, + "threaded": True, + "display_name": "display_name_value", + "external_user_allowed": True, + "space_threading_state": 2, + "space_details": { + "description": "description_value", + "guidelines": "guidelines_value", + }, + "space_history_state": 1, + "import_mode": True, + "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, + "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, + "access_settings": {"access_state": 1, "audience": "audience_value"}, + "customer": "customer_value", + "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "assistant_managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, + "import_mode_expire_time": {}, + } + # The version of a generated dependency at test runtime may differ from the 
version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gc_space.CreateSpaceRequest.meta.fields["space"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of 
the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del request_init["space"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = gc_space.Space( + name="name_value", + type_=gc_space.Space.Type.ROOM, + space_type=gc_space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = gc_space.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_message(request) + response = client.create_space(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, gc_space.Space) + assert response.name == "name_value" + assert response.type_ == gc_space.Space.Type.ROOM + assert response.space_type == gc_space.Space.SpaceType.SPACE + assert response.single_user_bot_dm is True + assert response.threaded is True + assert response.display_name == "display_name_value" + assert response.external_user_allowed is True + assert ( + response.space_threading_state + == gc_space.Space.SpaceThreadingState.THREADED_MESSAGES + ) + assert response.space_history_state == history_state.HistoryState.HISTORY_OFF + assert response.import_mode is True + assert response.admin_installed is True + assert response.customer == "customer_value" + assert response.space_uri == "space_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_message_rest_interceptors(null_interceptor): +def test_create_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24069,11 +29347,19 @@ def test_delete_message_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_delete_message" + transports.ChatServiceRestInterceptor, "post_create_space" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_create_space_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_create_space" ) as pre, ): pre.assert_not_called() - pb_message = message.DeleteMessageRequest.pb(message.DeleteMessageRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gc_space.CreateSpaceRequest.pb(gc_space.CreateSpaceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24084,15 +29370,19 @@ def 
test_delete_message_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gc_space.Space.to_json(gc_space.Space()) + req.return_value.content = return_value - request = message.DeleteMessageRequest() + request = gc_space.CreateSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = gc_space.Space() + post_with_metadata.return_value = gc_space.Space(), metadata - client.delete_message( + client.create_space( request, metadata=[ ("key", "val"), @@ -24101,14 +29391,16 @@ def test_delete_message_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_attachment_rest_bad_request(request_type=attachment.GetAttachmentRequest): +def test_set_up_space_rest_bad_request(request_type=space_setup.SetUpSpaceRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24124,35 +29416,43 @@ def test_get_attachment_rest_bad_request(request_type=attachment.GetAttachmentRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_attachment(request) + client.set_up_space(request) @pytest.mark.parametrize( "request_type", [ - attachment.GetAttachmentRequest, + space_setup.SetUpSpaceRequest, dict, ], ) -def test_get_attachment_rest_call_success(request_type): +def test_set_up_space_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2/attachments/sample3"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = attachment.Attachment( + return_value = space.Space( name="name_value", - content_name="content_name_value", - content_type="content_type_value", - thumbnail_uri="thumbnail_uri_value", - download_uri="download_uri_value", - source=attachment.Attachment.Source.DRIVE_FILE, + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -24160,25 +29460,35 @@ def test_get_attachment_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = attachment.Attachment.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_attachment(request) + response = client.set_up_space(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, attachment.Attachment) + assert isinstance(response, space.Space) assert response.name == "name_value" - assert response.content_name == "content_name_value" - assert response.content_type == "content_type_value" - assert response.thumbnail_uri == "thumbnail_uri_value" - assert response.download_uri == "download_uri_value" - assert response.source == attachment.Attachment.Source.DRIVE_FILE + assert response.type_ == space.Space.Type.ROOM + assert response.space_type == space.Space.SpaceType.SPACE + assert response.single_user_bot_dm is True + assert response.threaded is True + assert response.display_name == "display_name_value" + assert response.external_user_allowed is True + assert ( + response.space_threading_state + == space.Space.SpaceThreadingState.THREADED_MESSAGES + ) + assert response.space_history_state == history_state.HistoryState.HISTORY_OFF + assert response.import_mode is True + assert response.admin_installed is True + assert response.customer == "customer_value" + assert response.space_uri == "space_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_attachment_rest_interceptors(null_interceptor): +def test_set_up_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24191,21 +29501,19 @@ def test_get_attachment_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_attachment" + transports.ChatServiceRestInterceptor, "post_set_up_space" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_attachment_with_metadata" + transports.ChatServiceRestInterceptor, "post_set_up_space_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, 
"pre_get_attachment" + transports.ChatServiceRestInterceptor, "pre_set_up_space" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = attachment.GetAttachmentRequest.pb( - attachment.GetAttachmentRequest() - ) + pb_message = space_setup.SetUpSpaceRequest.pb(space_setup.SetUpSpaceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24216,19 +29524,19 @@ def test_get_attachment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = attachment.Attachment.to_json(attachment.Attachment()) + return_value = space.Space.to_json(space.Space()) req.return_value.content = return_value - request = attachment.GetAttachmentRequest() + request = space_setup.SetUpSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = attachment.Attachment() - post_with_metadata.return_value = attachment.Attachment(), metadata + post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata - client.get_attachment( + client.set_up_space( request, metadata=[ ("key", "val"), @@ -24241,14 +29549,12 @@ def test_get_attachment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_upload_attachment_rest_bad_request( - request_type=attachment.UploadAttachmentRequest, -): +def test_update_space_rest_bad_request(request_type=gc_space.UpdateSpaceRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {"space": {"name": "spaces/sample1"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24264,161 +29570,152 @@ def test_upload_attachment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.upload_attachment(request) + client.update_space(request) @pytest.mark.parametrize( "request_type", [ - attachment.UploadAttachmentRequest, + gc_space.UpdateSpaceRequest, dict, ], ) -def test_upload_attachment_rest_call_success(request_type): +def test_update_space_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = attachment.UploadAttachmentResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = attachment.UploadAttachmentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.upload_attachment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, attachment.UploadAttachmentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upload_attachment_rest_interceptors(null_interceptor): - transport = transports.ChatServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ChatServiceRestInterceptor(), - ) - client = ChatServiceClient(transport=transport) - - with ( - mock.patch.object(type(client.transport._session), "request") as req, - mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object( - transports.ChatServiceRestInterceptor, "post_upload_attachment" - ) as post, - mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_upload_attachment_with_metadata", - ) as post_with_metadata, - mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_upload_attachment" - ) as pre, - ): - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = attachment.UploadAttachmentRequest.pb( - attachment.UploadAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = attachment.UploadAttachmentResponse.to_json( - attachment.UploadAttachmentResponse() - ) - req.return_value.content = return_value + request_init = {"space": {"name": "spaces/sample1"}} + request_init["space"] = { + "name": "spaces/sample1", + "type_": 1, + "space_type": 1, + "single_user_bot_dm": True, + "threaded": True, + "display_name": "display_name_value", + "external_user_allowed": True, + "space_threading_state": 2, + "space_details": { + "description": "description_value", + "guidelines": "guidelines_value", + }, + "space_history_state": 1, + "import_mode": True, + "create_time": 
{"seconds": 751, "nanos": 543}, + "last_active_time": {}, + "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, + "access_settings": {"access_state": 1, "audience": "audience_value"}, + "customer": "customer_value", + "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "assistant_managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, + "import_mode_expire_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - request = attachment.UploadAttachmentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = attachment.UploadAttachmentResponse() - post_with_metadata.return_value = ( - attachment.UploadAttachmentResponse(), - metadata, - ) + # Determine if the message type is proto-plus or protobuf + test_field = gc_space.UpdateSpaceRequest.meta.fields["space"] - client.upload_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_list_spaces_rest_bad_request(request_type=space.ListSpacesRequest): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_spaces(request) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - space.ListSpacesRequest, - dict, - ], -) -def test_list_spaces_rest_call_success(request_type): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del request_init["space"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.ListSpacesResponse( - next_page_token="next_page_token_value", + return_value = gc_space.Space( + name="name_value", + type_=gc_space.Space.Type.ROOM, + space_type=gc_space.Space.SpaceType.SPACE, + single_user_bot_dm=True, + threaded=True, + display_name="display_name_value", + external_user_allowed=True, + space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_history_state=history_state.HistoryState.HISTORY_OFF, + import_mode=True, + admin_installed=True, + customer="customer_value", + space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -24426,20 +29723,35 @@ def test_list_spaces_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.ListSpacesResponse.pb(return_value) + return_value = gc_space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_spaces(request) + response = client.update_space(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpacesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gc_space.Space) + assert response.name == "name_value" + assert response.type_ == gc_space.Space.Type.ROOM + assert response.space_type == gc_space.Space.SpaceType.SPACE + assert response.single_user_bot_dm is True + assert response.threaded is True + assert response.display_name == "display_name_value" + assert response.external_user_allowed is True + assert ( + response.space_threading_state + == gc_space.Space.SpaceThreadingState.THREADED_MESSAGES + ) + assert response.space_history_state == history_state.HistoryState.HISTORY_OFF + assert response.import_mode is True + assert response.admin_installed is True + assert response.customer == "customer_value" + assert response.space_uri == "space_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_spaces_rest_interceptors(null_interceptor): +def test_update_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24452,19 +29764,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces" + transports.ChatServiceRestInterceptor, "post_update_space" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces_with_metadata" + transports.ChatServiceRestInterceptor, "post_update_space_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_spaces" + transports.ChatServiceRestInterceptor, "pre_update_space" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
space.ListSpacesRequest.pb(space.ListSpacesRequest()) + pb_message = gc_space.UpdateSpaceRequest.pb(gc_space.UpdateSpaceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24475,19 +29787,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.ListSpacesResponse.to_json(space.ListSpacesResponse()) + return_value = gc_space.Space.to_json(gc_space.Space()) req.return_value.content = return_value - request = space.ListSpacesRequest() + request = gc_space.UpdateSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.ListSpacesResponse() - post_with_metadata.return_value = space.ListSpacesResponse(), metadata + post.return_value = gc_space.Space() + post_with_metadata.return_value = gc_space.Space(), metadata - client.list_spaces( + client.update_space( request, metadata=[ ("key", "val"), @@ -24500,12 +29812,12 @@ def test_list_spaces_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_search_spaces_rest_bad_request(request_type=space.SearchSpacesRequest): +def test_delete_space_rest_bad_request(request_type=space.DeleteSpaceRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -24521,53 +29833,45 @@ def test_search_spaces_rest_bad_request(request_type=space.SearchSpacesRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_spaces(request) + client.delete_space(request) @pytest.mark.parametrize( "request_type", [ - space.SearchSpacesRequest, + space.DeleteSpaceRequest, dict, ], ) -def test_search_spaces_rest_call_success(request_type): +def test_delete_space_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"name": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.SearchSpacesResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = space.SearchSpacesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_spaces(request) + response = client.delete_space(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchSpacesPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_spaces_rest_interceptors(null_interceptor): +def test_delete_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24580,19 +29884,11 @@ def test_search_spaces_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_search_spaces" - ) as post, - mock.patch.object( - transports.ChatServiceRestInterceptor, "post_search_spaces_with_metadata" - ) as post_with_metadata, - mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_search_spaces" + transports.ChatServiceRestInterceptor, "pre_delete_space" ) as pre, ): pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) + pb_message = space.DeleteSpaceRequest.pb(space.DeleteSpaceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24603,19 +29899,15 @@ def test_search_spaces_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.SearchSpacesResponse.to_json(space.SearchSpacesResponse()) - req.return_value.content = return_value - request = space.SearchSpacesRequest() + request = space.DeleteSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.SearchSpacesResponse() - post_with_metadata.return_value = 
space.SearchSpacesResponse(), metadata - client.search_spaces( + client.delete_space( request, metadata=[ ("key", "val"), @@ -24624,11 +29916,11 @@ def test_search_spaces_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_get_space_rest_bad_request(request_type=space.GetSpaceRequest): +def test_complete_import_space_rest_bad_request( + request_type=space.CompleteImportSpaceRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24649,17 +29941,17 @@ def test_get_space_rest_bad_request(request_type=space.GetSpaceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space(request) + client.complete_import_space(request) @pytest.mark.parametrize( "request_type", [ - space.GetSpaceRequest, + space.CompleteImportSpaceRequest, dict, ], ) -def test_get_space_rest_call_success(request_type): +def test_complete_import_space_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24671,57 +29963,26 @@ def test_get_space_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space.Space( - name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, - ) + return_value = space.CompleteImportSpaceResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = space.CompleteImportSpaceResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space(request) + response = client.complete_import_space(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, space.Space) - assert response.name == "name_value" - assert response.type_ == space.Space.Type.ROOM - assert response.space_type == space.Space.SpaceType.SPACE - assert response.single_user_bot_dm is True - assert response.threaded is True - assert response.display_name == "display_name_value" - assert response.external_user_allowed is True - assert ( - response.space_threading_state - == space.Space.SpaceThreadingState.THREADED_MESSAGES - ) - assert response.space_history_state == history_state.HistoryState.HISTORY_OFF - assert response.import_mode is True - assert response.admin_installed is True - assert response.customer == "customer_value" - assert response.space_uri == "space_uri_value" + assert isinstance(response, space.CompleteImportSpaceResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_space_rest_interceptors(null_interceptor): +def test_complete_import_space_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24734,19 +29995,22 @@ def test_get_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space" + transports.ChatServiceRestInterceptor, "post_complete_import_space" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space_with_metadata" + transports.ChatServiceRestInterceptor, + "post_complete_import_space_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_space" + transports.ChatServiceRestInterceptor, "pre_complete_import_space" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space.GetSpaceRequest.pb(space.GetSpaceRequest()) + pb_message = 
space.CompleteImportSpaceRequest.pb( + space.CompleteImportSpaceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -24757,19 +30021,21 @@ def test_get_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.Space.to_json(space.Space()) + return_value = space.CompleteImportSpaceResponse.to_json( + space.CompleteImportSpaceResponse() + ) req.return_value.content = return_value - request = space.GetSpaceRequest() + request = space.CompleteImportSpaceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.Space() - post_with_metadata.return_value = space.Space(), metadata + post.return_value = space.CompleteImportSpaceResponse() + post_with_metadata.return_value = space.CompleteImportSpaceResponse(), metadata - client.get_space( + client.complete_import_space( request, metadata=[ ("key", "val"), @@ -24782,7 +30048,9 @@ def test_get_space_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_space_rest_bad_request(request_type=gc_space.CreateSpaceRequest): +def test_find_direct_message_rest_bad_request( + request_type=space.FindDirectMessageRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -24803,152 +30071,43 @@ def test_create_space_rest_bad_request(request_type=gc_space.CreateSpaceRequest) response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_space(request) + client.find_direct_message(request) @pytest.mark.parametrize( "request_type", [ - gc_space.CreateSpaceRequest, + space.FindDirectMessageRequest, dict, ], ) -def test_create_space_rest_call_success(request_type): +def 
test_find_direct_message_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {} - request_init["space"] = { - "name": "name_value", - "type_": 1, - "space_type": 1, - "single_user_bot_dm": True, - "threaded": True, - "display_name": "display_name_value", - "external_user_allowed": True, - "space_threading_state": 2, - "space_details": { - "description": "description_value", - "guidelines": "guidelines_value", - }, - "space_history_state": 1, - "import_mode": True, - "create_time": {"seconds": 751, "nanos": 543}, - "last_active_time": {}, - "admin_installed": True, - "membership_count": { - "joined_direct_human_user_count": 3185, - "joined_group_count": 1933, - }, - "access_settings": {"access_state": 1, "audience": "audience_value"}, - "customer": "customer_value", - "space_uri": "space_uri_value", - "predefined_permission_settings": 1, - "permission_settings": { - "manage_members_and_groups": { - "managers_allowed": True, - "assistant_managers_allowed": True, - "members_allowed": True, - }, - "modify_space_details": {}, - "toggle_history": {}, - "use_at_mention_all": {}, - "manage_apps": {}, - "manage_webhooks": {}, - "post_messages": {}, - "reply_messages": {}, - }, - "import_mode_expire_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gc_space.CreateSpaceRequest.meta.fields["space"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["space"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["space"][field])): - del request_init["space"][field][i][subfield] - else: - del 
request_init["space"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space.Space( + return_value = space.Space( name="name_value", - type_=gc_space.Space.Type.ROOM, - space_type=gc_space.Space.SpaceType.SPACE, + type_=space.Space.Type.ROOM, + space_type=space.Space.SpaceType.SPACE, single_user_bot_dm=True, threaded=True, display_name="display_name_value", external_user_allowed=True, - space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, + space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, space_history_state=history_state.HistoryState.HISTORY_OFF, import_mode=True, admin_installed=True, customer="customer_value", space_uri="space_uri_value", - predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -24956,25 +30115,25 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = space.Space.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_space(request) + response = client.find_direct_message(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_space.Space) + assert isinstance(response, space.Space) assert response.name == "name_value" - assert response.type_ == gc_space.Space.Type.ROOM - assert response.space_type == gc_space.Space.SpaceType.SPACE + assert response.type_ == space.Space.Type.ROOM + assert response.space_type == space.Space.SpaceType.SPACE assert response.single_user_bot_dm is True assert response.threaded is True assert response.display_name == "display_name_value" assert response.external_user_allowed is True assert ( response.space_threading_state - == gc_space.Space.SpaceThreadingState.THREADED_MESSAGES + == space.Space.SpaceThreadingState.THREADED_MESSAGES ) assert response.space_history_state == history_state.HistoryState.HISTORY_OFF assert response.import_mode is True @@ -24984,7 +30143,7 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_space_rest_interceptors(null_interceptor): +def test_find_direct_message_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -24997,19 +30156,20 @@ def test_create_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_space" + transports.ChatServiceRestInterceptor, "post_find_direct_message" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_space_with_metadata" + transports.ChatServiceRestInterceptor, + "post_find_direct_message_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_create_space" + transports.ChatServiceRestInterceptor, "pre_find_direct_message" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
gc_space.CreateSpaceRequest.pb(gc_space.CreateSpaceRequest()) + pb_message = space.FindDirectMessageRequest.pb(space.FindDirectMessageRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25020,19 +30180,19 @@ def test_create_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_space.Space.to_json(gc_space.Space()) + return_value = space.Space.to_json(space.Space()) req.return_value.content = return_value - request = gc_space.CreateSpaceRequest() + request = space.FindDirectMessageRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_space.Space() - post_with_metadata.return_value = gc_space.Space(), metadata + post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata - client.create_space( + client.find_direct_message( request, metadata=[ ("key", "val"), @@ -25045,12 +30205,14 @@ def test_create_space_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_set_up_space_rest_bad_request(request_type=space_setup.SetUpSpaceRequest): +def test_create_membership_rest_bad_request( + request_type=gc_membership.CreateMembershipRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25066,43 +30228,114 @@ def test_set_up_space_rest_bad_request(request_type=space_setup.SetUpSpaceReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_up_space(request) + client.create_membership(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gc_membership.CreateMembershipRequest, + dict, + ], +) +def test_create_membership_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request_init["membership"] = { + "name": "name_value", + "state": 1, + "role": 1, + "member": { + "name": "name_value", + "display_name": "display_name_value", + "domain_id": "domain_id_value", + "type_": 1, + "is_anonymous": True, + }, + "group_member": {"name": "name_value"}, + "create_time": {"seconds": 751, "nanos": 543}, + "delete_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Determine if the message type is proto-plus or protobuf + test_field = gc_membership.CreateMembershipRequest.meta.fields["membership"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["membership"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - space_setup.SetUpSpaceRequest, - dict, - ], -) -def test_set_up_space_rest_call_success(request_type): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO 
COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["membership"][field])): + del request_init["membership"][field][i][subfield] + else: + del request_init["membership"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.Space( + return_value = gc_membership.Membership( name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, + state=gc_membership.Membership.MembershipState.JOINED, + role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, ) # Wrap the value into a proper Response obj @@ -25110,35 +30343,22 @@ def test_set_up_space_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = gc_membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_up_space(request) + response = client.create_membership(request) # Establish that the response is the type that 
we expect. - assert isinstance(response, space.Space) + assert isinstance(response, gc_membership.Membership) assert response.name == "name_value" - assert response.type_ == space.Space.Type.ROOM - assert response.space_type == space.Space.SpaceType.SPACE - assert response.single_user_bot_dm is True - assert response.threaded is True - assert response.display_name == "display_name_value" - assert response.external_user_allowed is True - assert ( - response.space_threading_state - == space.Space.SpaceThreadingState.THREADED_MESSAGES - ) - assert response.space_history_state == history_state.HistoryState.HISTORY_OFF - assert response.import_mode is True - assert response.admin_installed is True - assert response.customer == "customer_value" - assert response.space_uri == "space_uri_value" + assert response.state == gc_membership.Membership.MembershipState.JOINED + assert response.role == gc_membership.Membership.MembershipRole.ROLE_MEMBER @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_up_space_rest_interceptors(null_interceptor): +def test_create_membership_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25151,19 +30371,22 @@ def test_set_up_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_set_up_space" + transports.ChatServiceRestInterceptor, "post_create_membership" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_set_up_space_with_metadata" + transports.ChatServiceRestInterceptor, + "post_create_membership_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_set_up_space" + transports.ChatServiceRestInterceptor, "pre_create_membership" ) as pre, ): 
pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space_setup.SetUpSpaceRequest.pb(space_setup.SetUpSpaceRequest()) + pb_message = gc_membership.CreateMembershipRequest.pb( + gc_membership.CreateMembershipRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25174,19 +30397,19 @@ def test_set_up_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.Space.to_json(space.Space()) + return_value = gc_membership.Membership.to_json(gc_membership.Membership()) req.return_value.content = return_value - request = space_setup.SetUpSpaceRequest() + request = gc_membership.CreateMembershipRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.Space() - post_with_metadata.return_value = space.Space(), metadata + post.return_value = gc_membership.Membership() + post_with_metadata.return_value = gc_membership.Membership(), metadata - client.set_up_space( + client.create_membership( request, metadata=[ ("key", "val"), @@ -25199,12 +30422,14 @@ def test_set_up_space_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_space_rest_bad_request(request_type=gc_space.UpdateSpaceRequest): +def test_update_membership_rest_bad_request( + request_type=gc_membership.UpdateMembershipRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"space": {"name": "spaces/sample1"}} + request_init = {"membership": {"name": "spaces/sample1/members/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25220,71 +30445,44 @@ def test_update_space_rest_bad_request(request_type=gc_space.UpdateSpaceRequest) response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space(request) + client.update_membership(request) @pytest.mark.parametrize( "request_type", [ - gc_space.UpdateSpaceRequest, + gc_membership.UpdateMembershipRequest, dict, ], ) -def test_update_space_rest_call_success(request_type): +def test_update_membership_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"space": {"name": "spaces/sample1"}} - request_init["space"] = { - "name": "spaces/sample1", - "type_": 1, - "space_type": 1, - "single_user_bot_dm": True, - "threaded": True, - "display_name": "display_name_value", - "external_user_allowed": True, - "space_threading_state": 2, - "space_details": { - "description": "description_value", - "guidelines": "guidelines_value", + request_init = {"membership": {"name": "spaces/sample1/members/sample2"}} + request_init["membership"] = { + "name": "spaces/sample1/members/sample2", + "state": 1, + "role": 1, + "member": { + "name": "name_value", + "display_name": "display_name_value", + "domain_id": "domain_id_value", + "type_": 1, + "is_anonymous": True, }, - "space_history_state": 1, - "import_mode": True, + "group_member": {"name": "name_value"}, "create_time": {"seconds": 751, "nanos": 543}, - "last_active_time": {}, - "admin_installed": True, - "membership_count": { - "joined_direct_human_user_count": 3185, - "joined_group_count": 1933, - }, - "access_settings": {"access_state": 1, "audience": "audience_value"}, - "customer": "customer_value", - "space_uri": "space_uri_value", - "predefined_permission_settings": 1, - "permission_settings": { - "manage_members_and_groups": { - "managers_allowed": True, - 
"assistant_managers_allowed": True, - "members_allowed": True, - }, - "modify_space_details": {}, - "toggle_history": {}, - "use_at_mention_all": {}, - "manage_apps": {}, - "manage_webhooks": {}, - "post_messages": {}, - "reply_messages": {}, - }, - "import_mode_expire_time": {}, + "delete_time": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gc_space.UpdateSpaceRequest.meta.fields["space"] + test_field = gc_membership.UpdateMembershipRequest.meta.fields["membership"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -25312,7 +30510,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["space"].items(): # pragma: NO COVER + for field, value in request_init["membership"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -25342,30 +30540,19 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["space"][field])): - del request_init["space"][field][i][subfield] + for i in range(0, len(request_init["membership"][field])): + del request_init["membership"][field][i][subfield] else: - del request_init["space"][field][subfield] + del request_init["membership"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space.Space( + return_value = gc_membership.Membership( name="name_value", - type_=gc_space.Space.Type.ROOM, - space_type=gc_space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=gc_space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, + state=gc_membership.Membership.MembershipState.JOINED, + role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, ) # Wrap the value into a proper Response obj @@ -25373,35 +30560,22 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space.Space.pb(return_value) + return_value = gc_membership.Membership.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space(request) + response = client.update_membership(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_space.Space) + assert isinstance(response, gc_membership.Membership) assert response.name == "name_value" - assert response.type_ == gc_space.Space.Type.ROOM - assert response.space_type == gc_space.Space.SpaceType.SPACE - assert response.single_user_bot_dm is True - assert response.threaded is True - assert response.display_name == "display_name_value" - assert response.external_user_allowed is True - assert ( - response.space_threading_state - == gc_space.Space.SpaceThreadingState.THREADED_MESSAGES - ) - assert response.space_history_state == history_state.HistoryState.HISTORY_OFF - assert response.import_mode is True - assert response.admin_installed is True - assert response.customer == "customer_value" - assert response.space_uri == "space_uri_value" + assert response.state == gc_membership.Membership.MembershipState.JOINED + assert response.role == gc_membership.Membership.MembershipRole.ROLE_MEMBER @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_space_rest_interceptors(null_interceptor): +def test_update_membership_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25414,19 +30588,22 @@ def test_update_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_update_space" + transports.ChatServiceRestInterceptor, "post_update_membership" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_update_space_with_metadata" + transports.ChatServiceRestInterceptor, + "post_update_membership_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_update_space" + transports.ChatServiceRestInterceptor, "pre_update_membership" ) as pre, ): 
pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gc_space.UpdateSpaceRequest.pb(gc_space.UpdateSpaceRequest()) + pb_message = gc_membership.UpdateMembershipRequest.pb( + gc_membership.UpdateMembershipRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25437,19 +30614,19 @@ def test_update_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_space.Space.to_json(gc_space.Space()) + return_value = gc_membership.Membership.to_json(gc_membership.Membership()) req.return_value.content = return_value - request = gc_space.UpdateSpaceRequest() + request = gc_membership.UpdateMembershipRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_space.Space() - post_with_metadata.return_value = gc_space.Space(), metadata + post.return_value = gc_membership.Membership() + post_with_metadata.return_value = gc_membership.Membership(), metadata - client.update_space( + client.update_membership( request, metadata=[ ("key", "val"), @@ -25462,12 +30639,14 @@ def test_update_space_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_space_rest_bad_request(request_type=space.DeleteSpaceRequest): +def test_delete_membership_rest_bad_request( + request_type=membership.DeleteMembershipRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25483,45 +30662,55 @@ def test_delete_space_rest_bad_request(request_type=space.DeleteSpaceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_space(request) + client.delete_membership(request) @pytest.mark.parametrize( "request_type", [ - space.DeleteSpaceRequest, + membership.DeleteMembershipRequest, dict, ], ) -def test_delete_space_rest_call_success(request_type): +def test_delete_membership_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = membership.Membership( + name="name_value", + state=membership.Membership.MembershipState.JOINED, + role=membership.Membership.MembershipRole.ROLE_MEMBER, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = membership.Membership.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_space(request) + response = client.delete_membership(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, membership.Membership) + assert response.name == "name_value" + assert response.state == membership.Membership.MembershipState.JOINED + assert response.role == membership.Membership.MembershipRole.ROLE_MEMBER @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_space_rest_interceptors(null_interceptor): +def test_delete_membership_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25534,11 +30723,22 @@ def test_delete_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_delete_space" + transports.ChatServiceRestInterceptor, "post_delete_membership" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_delete_membership_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_delete_membership" ) as pre, ): pre.assert_not_called() - pb_message = space.DeleteSpaceRequest.pb(space.DeleteSpaceRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = membership.DeleteMembershipRequest.pb( + membership.DeleteMembershipRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25549,15 +30749,19 @@ def test_delete_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = membership.Membership.to_json(membership.Membership()) + req.return_value.content = return_value - request = space.DeleteSpaceRequest() + request = membership.DeleteMembershipRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = 
request, metadata + post.return_value = membership.Membership() + post_with_metadata.return_value = membership.Membership(), metadata - client.delete_space( + client.delete_membership( request, metadata=[ ("key", "val"), @@ -25566,16 +30770,18 @@ def test_delete_space_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_complete_import_space_rest_bad_request( - request_type=space.CompleteImportSpaceRequest, +def test_create_reaction_rest_bad_request( + request_type=gc_reaction.CreateReactionRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"parent": "spaces/sample1/messages/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -25591,48 +30797,141 @@ def test_complete_import_space_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.complete_import_space(request) + client.create_reaction(request) @pytest.mark.parametrize( "request_type", [ - space.CompleteImportSpaceRequest, + gc_reaction.CreateReactionRequest, dict, ], ) -def test_complete_import_space_rest_call_success(request_type): +def test_create_reaction_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"parent": "spaces/sample1/messages/sample2"} + request_init["reaction"] = { + "name": "name_value", + "user": { + "name": "name_value", + "display_name": "display_name_value", + "domain_id": "domain_id_value", + "type_": 1, + "is_anonymous": True, + }, + "emoji": { + "unicode": 
"unicode_value", + "custom_emoji": { + "name": "name_value", + "uid": "uid_value", + "emoji_name": "emoji_name_value", + "temporary_image_uri": "temporary_image_uri_value", + "payload": { + "file_content": b"file_content_blob", + "filename": "filename_value", + }, + }, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gc_reaction.CreateReactionRequest.meta.fields["reaction"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["reaction"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reaction"][field])): + del request_init["reaction"][field][i][subfield] + else: + del request_init["reaction"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space.CompleteImportSpaceResponse() + return_value = gc_reaction.Reaction( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.CompleteImportSpaceResponse.pb(return_value) + return_value = gc_reaction.Reaction.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.complete_import_space(request) + response = client.create_reaction(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, space.CompleteImportSpaceResponse) + assert isinstance(response, gc_reaction.Reaction) + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_complete_import_space_rest_interceptors(null_interceptor): +def test_create_reaction_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25645,21 +30944,20 @@ def test_complete_import_space_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_complete_import_space" + transports.ChatServiceRestInterceptor, "post_create_reaction" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_complete_import_space_with_metadata", + transports.ChatServiceRestInterceptor, "post_create_reaction_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_complete_import_space" + transports.ChatServiceRestInterceptor, "pre_create_reaction" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space.CompleteImportSpaceRequest.pb( - space.CompleteImportSpaceRequest() + pb_message = gc_reaction.CreateReactionRequest.pb( + gc_reaction.CreateReactionRequest() ) transcode.return_value = { "method": "post", @@ -25671,21 +30969,19 @@ def test_complete_import_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.CompleteImportSpaceResponse.to_json( - space.CompleteImportSpaceResponse() - ) + return_value = gc_reaction.Reaction.to_json(gc_reaction.Reaction()) req.return_value.content = return_value - request = 
space.CompleteImportSpaceRequest() + request = gc_reaction.CreateReactionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.CompleteImportSpaceResponse() - post_with_metadata.return_value = space.CompleteImportSpaceResponse(), metadata + post.return_value = gc_reaction.Reaction() + post_with_metadata.return_value = gc_reaction.Reaction(), metadata - client.complete_import_space( + client.create_reaction( request, metadata=[ ("key", "val"), @@ -25698,14 +30994,12 @@ def test_complete_import_space_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_find_direct_message_rest_bad_request( - request_type=space.FindDirectMessageRequest, -): +def test_list_reactions_rest_bad_request(request_type=reaction.ListReactionsRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1/messages/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25721,43 +31015,30 @@ def test_find_direct_message_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.find_direct_message(request) + client.list_reactions(request) @pytest.mark.parametrize( "request_type", [ - space.FindDirectMessageRequest, + reaction.ListReactionsRequest, dict, ], ) -def test_find_direct_message_rest_call_success(request_type): +def test_list_reactions_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1/messages/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space.Space( - name="name_value", - type_=space.Space.Type.ROOM, - space_type=space.Space.SpaceType.SPACE, - single_user_bot_dm=True, - threaded=True, - display_name="display_name_value", - external_user_allowed=True, - space_threading_state=space.Space.SpaceThreadingState.THREADED_MESSAGES, - space_history_state=history_state.HistoryState.HISTORY_OFF, - import_mode=True, - admin_installed=True, - customer="customer_value", - space_uri="space_uri_value", - predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, + return_value = reaction.ListReactionsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -25765,35 +31046,20 @@ def test_find_direct_message_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.Space.pb(return_value) + return_value = reaction.ListReactionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.find_direct_message(request) + response = client.list_reactions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, space.Space) - assert response.name == "name_value" - assert response.type_ == space.Space.Type.ROOM - assert response.space_type == space.Space.SpaceType.SPACE - assert response.single_user_bot_dm is True - assert response.threaded is True - assert response.display_name == "display_name_value" - assert response.external_user_allowed is True - assert ( - response.space_threading_state - == space.Space.SpaceThreadingState.THREADED_MESSAGES - ) - assert response.space_history_state == history_state.HistoryState.HISTORY_OFF - assert response.import_mode is True - assert response.admin_installed is True - assert response.customer == "customer_value" - assert response.space_uri == "space_uri_value" + assert isinstance(response, pagers.ListReactionsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_find_direct_message_rest_interceptors(null_interceptor): +def test_list_reactions_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -25806,20 +31072,19 @@ def test_find_direct_message_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_find_direct_message" + transports.ChatServiceRestInterceptor, "post_list_reactions" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_find_direct_message_with_metadata", + transports.ChatServiceRestInterceptor, "post_list_reactions_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_find_direct_message" + transports.ChatServiceRestInterceptor, "pre_list_reactions" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - 
pb_message = space.FindDirectMessageRequest.pb(space.FindDirectMessageRequest()) + pb_message = reaction.ListReactionsRequest.pb(reaction.ListReactionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -25830,19 +31095,21 @@ def test_find_direct_message_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space.Space.to_json(space.Space()) + return_value = reaction.ListReactionsResponse.to_json( + reaction.ListReactionsResponse() + ) req.return_value.content = return_value - request = space.FindDirectMessageRequest() + request = reaction.ListReactionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.Space() - post_with_metadata.return_value = space.Space(), metadata + post.return_value = reaction.ListReactionsResponse() + post_with_metadata.return_value = reaction.ListReactionsResponse(), metadata - client.find_direct_message( + client.list_reactions( request, metadata=[ ("key", "val"), @@ -25855,14 +31122,12 @@ def test_find_direct_message_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_membership_rest_bad_request( - request_type=gc_membership.CreateMembershipRequest, -): +def test_delete_reaction_rest_bad_request(request_type=reaction.DeleteReactionRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -25878,137 +31143,45 @@ def test_create_membership_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_membership(request) + client.delete_reaction(request) @pytest.mark.parametrize( "request_type", [ - gc_membership.CreateMembershipRequest, + reaction.DeleteReactionRequest, dict, ], ) -def test_create_membership_rest_call_success(request_type): +def test_delete_reaction_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} - request_init["membership"] = { - "name": "name_value", - "state": 1, - "role": 1, - "member": { - "name": "name_value", - "display_name": "display_name_value", - "domain_id": "domain_id_value", - "type_": 1, - "is_anonymous": True, - }, - "group_member": {"name": "name_value"}, - "create_time": {"seconds": 751, "nanos": 543}, - "delete_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gc_membership.CreateMembershipRequest.meta.fields["membership"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["membership"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["membership"][field])): - del request_init["membership"][field][i][subfield] - else: - del 
request_init["membership"][field][subfield] + request_init = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_membership.Membership( - name="name_value", - state=gc_membership.Membership.MembershipState.JOINED, - role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, - ) + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_membership(request) + response = client.delete_reaction(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_membership.Membership) - assert response.name == "name_value" - assert response.state == gc_membership.Membership.MembershipState.JOINED - assert response.role == gc_membership.Membership.MembershipRole.ROLE_MEMBER + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_membership_rest_interceptors(null_interceptor): +def test_delete_reaction_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26021,22 +31194,11 @@ def test_create_membership_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_membership" - ) as post, - mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_create_membership_with_metadata", - ) as post_with_metadata, - mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_create_membership" + transports.ChatServiceRestInterceptor, "pre_delete_reaction" ) as pre, ): pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gc_membership.CreateMembershipRequest.pb( - gc_membership.CreateMembershipRequest() - ) + pb_message = reaction.DeleteReactionRequest.pb(reaction.DeleteReactionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26047,19 +31209,15 @@ def test_create_membership_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_membership.Membership.to_json(gc_membership.Membership()) - req.return_value.content = return_value - request = gc_membership.CreateMembershipRequest() + request = reaction.DeleteReactionRequest() metadata = [ ("key", 
"val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_membership.Membership() - post_with_metadata.return_value = gc_membership.Membership(), metadata - client.create_membership( + client.delete_reaction( request, metadata=[ ("key", "val"), @@ -26068,18 +31226,16 @@ def test_create_membership_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_update_membership_rest_bad_request( - request_type=gc_membership.UpdateMembershipRequest, +def test_create_custom_emoji_rest_bad_request( + request_type=reaction.CreateCustomEmojiRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"membership": {"name": "spaces/sample1/members/sample2"}} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26095,44 +31251,36 @@ def test_update_membership_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_membership(request) + client.create_custom_emoji(request) @pytest.mark.parametrize( "request_type", [ - gc_membership.UpdateMembershipRequest, + reaction.CreateCustomEmojiRequest, dict, ], ) -def test_update_membership_rest_call_success(request_type): +def test_create_custom_emoji_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"membership": {"name": "spaces/sample1/members/sample2"}} - request_init["membership"] = { - "name": "spaces/sample1/members/sample2", - "state": 1, - "role": 1, - "member": { - "name": "name_value", - "display_name": "display_name_value", - "domain_id": "domain_id_value", - "type_": 1, - "is_anonymous": True, - }, - "group_member": {"name": "name_value"}, - "create_time": {"seconds": 751, "nanos": 543}, - "delete_time": {}, + request_init = {} + request_init["custom_emoji"] = { + "name": "name_value", + "uid": "uid_value", + "emoji_name": "emoji_name_value", + "temporary_image_uri": "temporary_image_uri_value", + "payload": {"file_content": b"file_content_blob", "filename": "filename_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gc_membership.UpdateMembershipRequest.meta.fields["membership"] + test_field = reaction.CreateCustomEmojiRequest.meta.fields["custom_emoji"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -26160,7 +31308,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["membership"].items(): # pragma: NO COVER + for field, value in request_init["custom_emoji"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -26190,19 +31338,20 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["membership"][field])): - del request_init["membership"][field][i][subfield] + for i in range(0, len(request_init["custom_emoji"][field])): + del request_init["custom_emoji"][field][i][subfield] else: - del request_init["membership"][field][subfield] + del request_init["custom_emoji"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gc_membership.Membership( + return_value = reaction.CustomEmoji( name="name_value", - state=gc_membership.Membership.MembershipState.JOINED, - role=gc_membership.Membership.MembershipRole.ROLE_MEMBER, + uid="uid_value", + emoji_name="emoji_name_value", + temporary_image_uri="temporary_image_uri_value", ) # Wrap the value into a proper Response obj @@ -26210,22 +31359,23 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_membership.Membership.pb(return_value) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_membership(request) + response = client.create_custom_emoji(request) # Establish that the response is the type that we expect. - assert isinstance(response, gc_membership.Membership) + assert isinstance(response, reaction.CustomEmoji) assert response.name == "name_value" - assert response.state == gc_membership.Membership.MembershipState.JOINED - assert response.role == gc_membership.Membership.MembershipRole.ROLE_MEMBER + assert response.uid == "uid_value" + assert response.emoji_name == "emoji_name_value" + assert response.temporary_image_uri == "temporary_image_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_membership_rest_interceptors(null_interceptor): +def test_create_custom_emoji_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26238,21 +31388,21 @@ def test_update_membership_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - 
transports.ChatServiceRestInterceptor, "post_update_membership" + transports.ChatServiceRestInterceptor, "post_create_custom_emoji" ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - "post_update_membership_with_metadata", + "post_create_custom_emoji_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_update_membership" + transports.ChatServiceRestInterceptor, "pre_create_custom_emoji" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gc_membership.UpdateMembershipRequest.pb( - gc_membership.UpdateMembershipRequest() + pb_message = reaction.CreateCustomEmojiRequest.pb( + reaction.CreateCustomEmojiRequest() ) transcode.return_value = { "method": "post", @@ -26264,19 +31414,19 @@ def test_update_membership_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_membership.Membership.to_json(gc_membership.Membership()) + return_value = reaction.CustomEmoji.to_json(reaction.CustomEmoji()) req.return_value.content = return_value - request = gc_membership.UpdateMembershipRequest() + request = reaction.CreateCustomEmojiRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_membership.Membership() - post_with_metadata.return_value = gc_membership.Membership(), metadata + post.return_value = reaction.CustomEmoji() + post_with_metadata.return_value = reaction.CustomEmoji(), metadata - client.update_membership( + client.create_custom_emoji( request, metadata=[ ("key", "val"), @@ -26289,14 +31439,12 @@ def test_update_membership_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_membership_rest_bad_request( - request_type=membership.DeleteMembershipRequest, -): +def 
test_get_custom_emoji_rest_bad_request(request_type=reaction.GetCustomEmojiRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/members/sample2"} + request_init = {"name": "customEmojis/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26312,32 +31460,33 @@ def test_delete_membership_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_membership(request) + client.get_custom_emoji(request) @pytest.mark.parametrize( "request_type", [ - membership.DeleteMembershipRequest, + reaction.GetCustomEmojiRequest, dict, ], ) -def test_delete_membership_rest_call_success(request_type): +def test_get_custom_emoji_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/members/sample2"} + request_init = {"name": "customEmojis/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = membership.Membership( + return_value = reaction.CustomEmoji( name="name_value", - state=membership.Membership.MembershipState.JOINED, - role=membership.Membership.MembershipRole.ROLE_MEMBER, + uid="uid_value", + emoji_name="emoji_name_value", + temporary_image_uri="temporary_image_uri_value", ) # Wrap the value into a proper Response obj @@ -26345,22 +31494,23 @@ def test_delete_membership_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = membership.Membership.pb(return_value) + return_value = reaction.CustomEmoji.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_membership(request) + response = client.get_custom_emoji(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, membership.Membership) + assert isinstance(response, reaction.CustomEmoji) assert response.name == "name_value" - assert response.state == membership.Membership.MembershipState.JOINED - assert response.role == membership.Membership.MembershipRole.ROLE_MEMBER + assert response.uid == "uid_value" + assert response.emoji_name == "emoji_name_value" + assert response.temporary_image_uri == "temporary_image_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_membership_rest_interceptors(null_interceptor): +def test_get_custom_emoji_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26373,22 +31523,19 @@ def test_delete_membership_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_delete_membership" + transports.ChatServiceRestInterceptor, "post_get_custom_emoji" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_delete_membership_with_metadata", + transports.ChatServiceRestInterceptor, "post_get_custom_emoji_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_delete_membership" + transports.ChatServiceRestInterceptor, "pre_get_custom_emoji" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = membership.DeleteMembershipRequest.pb( - membership.DeleteMembershipRequest() - ) + pb_message = reaction.GetCustomEmojiRequest.pb(reaction.GetCustomEmojiRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26399,19 +31546,19 @@ def test_delete_membership_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = membership.Membership.to_json(membership.Membership()) + return_value = reaction.CustomEmoji.to_json(reaction.CustomEmoji()) req.return_value.content = return_value - request = membership.DeleteMembershipRequest() + request = reaction.GetCustomEmojiRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = membership.Membership() - post_with_metadata.return_value = membership.Membership(), metadata + post.return_value = reaction.CustomEmoji() + post_with_metadata.return_value = reaction.CustomEmoji(), metadata - client.delete_membership( + client.get_custom_emoji( request, metadata=[ ("key", "val"), @@ -26424,14 +31571,14 @@ def test_delete_membership_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_reaction_rest_bad_request( - request_type=gc_reaction.CreateReactionRequest, +def test_list_custom_emojis_rest_bad_request( + request_type=reaction.ListCustomEmojisRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1/messages/sample2"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26447,120 +31594,30 @@ def test_create_reaction_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_reaction(request) + client.list_custom_emojis(request) @pytest.mark.parametrize( "request_type", [ - gc_reaction.CreateReactionRequest, + reaction.ListCustomEmojisRequest, dict, ], ) -def test_create_reaction_rest_call_success(request_type): +def test_list_custom_emojis_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1/messages/sample2"} - request_init["reaction"] = { - "name": "name_value", - "user": { - "name": "name_value", - "display_name": "display_name_value", - "domain_id": "domain_id_value", - "type_": 1, - "is_anonymous": True, - }, - "emoji": { - "unicode": "unicode_value", - "custom_emoji": { - "name": "name_value", - "uid": "uid_value", - "emoji_name": "emoji_name_value", - "temporary_image_uri": "temporary_image_uri_value", - "payload": { - "file_content": b"file_content_blob", - "filename": "filename_value", - }, - }, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gc_reaction.CreateReactionRequest.meta.fields["reaction"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["reaction"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["reaction"][field])): - del request_init["reaction"][field][i][subfield] - else: - del 
request_init["reaction"][field][subfield] + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_reaction.Reaction( - name="name_value", + return_value = reaction.ListCustomEmojisResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -26568,20 +31625,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_reaction.Reaction.pb(return_value) + return_value = reaction.ListCustomEmojisResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_reaction(request) + response = client.list_custom_emojis(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_reaction.Reaction) - assert response.name == "name_value" + assert isinstance(response, pagers.ListCustomEmojisPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_reaction_rest_interceptors(null_interceptor): +def test_list_custom_emojis_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26594,20 +31651,21 @@ def test_create_reaction_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_reaction" + transports.ChatServiceRestInterceptor, "post_list_custom_emojis" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_reaction_with_metadata" + transports.ChatServiceRestInterceptor, + "post_list_custom_emojis_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_create_reaction" + transports.ChatServiceRestInterceptor, "pre_list_custom_emojis" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gc_reaction.CreateReactionRequest.pb( - gc_reaction.CreateReactionRequest() + pb_message = reaction.ListCustomEmojisRequest.pb( + reaction.ListCustomEmojisRequest() ) transcode.return_value = { "method": "post", @@ -26619,19 +31677,21 @@ def test_create_reaction_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_reaction.Reaction.to_json(gc_reaction.Reaction()) + return_value = reaction.ListCustomEmojisResponse.to_json( + reaction.ListCustomEmojisResponse() + ) 
req.return_value.content = return_value - request = gc_reaction.CreateReactionRequest() + request = reaction.ListCustomEmojisRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_reaction.Reaction() - post_with_metadata.return_value = gc_reaction.Reaction(), metadata + post.return_value = reaction.ListCustomEmojisResponse() + post_with_metadata.return_value = reaction.ListCustomEmojisResponse(), metadata - client.create_reaction( + client.list_custom_emojis( request, metadata=[ ("key", "val"), @@ -26644,12 +31704,14 @@ def test_create_reaction_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_reactions_rest_bad_request(request_type=reaction.ListReactionsRequest): +def test_delete_custom_emoji_rest_bad_request( + request_type=reaction.DeleteCustomEmojiRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1/messages/sample2"} + request_init = {"name": "customEmojis/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26665,51 +31727,45 @@ def test_list_reactions_rest_bad_request(request_type=reaction.ListReactionsRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_reactions(request) + client.delete_custom_emoji(request) @pytest.mark.parametrize( "request_type", [ - reaction.ListReactionsRequest, + reaction.DeleteCustomEmojiRequest, dict, ], ) -def test_list_reactions_rest_call_success(request_type): +def test_delete_custom_emoji_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1/messages/sample2"} + request_init = {"name": "customEmojis/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reaction.ListReactionsResponse( - next_page_token="next_page_token_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reaction.ListReactionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_reactions(request) + response = client.delete_custom_emoji(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReactionsPager) - assert response.next_page_token == "next_page_token_value" + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_reactions_rest_interceptors(null_interceptor): +def test_delete_custom_emoji_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26722,19 +31778,13 @@ def test_list_reactions_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_reactions" - ) as post, - mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_reactions_with_metadata" - ) as post_with_metadata, - mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_reactions" + transports.ChatServiceRestInterceptor, "pre_delete_custom_emoji" ) as pre, ): pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reaction.ListReactionsRequest.pb(reaction.ListReactionsRequest()) + pb_message = reaction.DeleteCustomEmojiRequest.pb( + reaction.DeleteCustomEmojiRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26745,21 +31795,15 @@ def test_list_reactions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reaction.ListReactionsResponse.to_json( - reaction.ListReactionsResponse() - ) - req.return_value.content = return_value - request = reaction.ListReactionsRequest() + request = reaction.DeleteCustomEmojiRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reaction.ListReactionsResponse() - 
post_with_metadata.return_value = reaction.ListReactionsResponse(), metadata - client.list_reactions( + client.delete_custom_emoji( request, metadata=[ ("key", "val"), @@ -26768,16 +31812,16 @@ def test_list_reactions_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_delete_reaction_rest_bad_request(request_type=reaction.DeleteReactionRequest): +def test_get_space_read_state_rest_bad_request( + request_type=space_read_state.GetSpaceReadStateRequest, +): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} + request_init = {"name": "users/sample1/spaces/sample2/spaceReadState"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -26793,45 +31837,51 @@ def test_delete_reaction_rest_bad_request(request_type=reaction.DeleteReactionRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_reaction(request) + client.get_space_read_state(request) @pytest.mark.parametrize( "request_type", [ - reaction.DeleteReactionRequest, + space_read_state.GetSpaceReadStateRequest, dict, ], ) -def test_delete_reaction_rest_call_success(request_type): +def test_get_space_read_state_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/messages/sample2/reactions/sample3"} + request_init = {"name": "users/sample1/spaces/sample2/spaceReadState"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = space_read_state.SpaceReadState( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = space_read_state.SpaceReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_reaction(request) + response = client.get_space_read_state(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, space_read_state.SpaceReadState) + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_reaction_rest_interceptors(null_interceptor): +def test_get_space_read_state_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -26844,11 +31894,22 @@ def test_delete_reaction_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_delete_reaction" + transports.ChatServiceRestInterceptor, "post_get_space_read_state" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_get_space_read_state_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_get_space_read_state" ) as pre, ): pre.assert_not_called() - pb_message = 
reaction.DeleteReactionRequest.pb(reaction.DeleteReactionRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = space_read_state.GetSpaceReadStateRequest.pb( + space_read_state.GetSpaceReadStateRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -26859,15 +31920,21 @@ def test_delete_reaction_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = space_read_state.SpaceReadState.to_json( + space_read_state.SpaceReadState() + ) + req.return_value.content = return_value - request = reaction.DeleteReactionRequest() + request = space_read_state.GetSpaceReadStateRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = space_read_state.SpaceReadState() + post_with_metadata.return_value = space_read_state.SpaceReadState(), metadata - client.delete_reaction( + client.get_space_read_state( request, metadata=[ ("key", "val"), @@ -26876,16 +31943,20 @@ def test_delete_reaction_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_create_custom_emoji_rest_bad_request( - request_type=reaction.CreateCustomEmojiRequest, +def test_update_space_read_state_rest_bad_request( + request_type=gc_space_read_state.UpdateSpaceReadStateRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = { + "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -26901,36 +31972,37 @@ def test_create_custom_emoji_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_custom_emoji(request) + client.update_space_read_state(request) @pytest.mark.parametrize( "request_type", [ - reaction.CreateCustomEmojiRequest, + gc_space_read_state.UpdateSpaceReadStateRequest, dict, ], ) -def test_create_custom_emoji_rest_call_success(request_type): +def test_update_space_read_state_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} - request_init["custom_emoji"] = { - "name": "name_value", - "uid": "uid_value", - "emoji_name": "emoji_name_value", - "temporary_image_uri": "temporary_image_uri_value", - "payload": {"file_content": b"file_content_blob", "filename": "filename_value"}, + request_init = { + "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} + } + request_init["space_read_state"] = { + "name": "users/sample1/spaces/sample2/spaceReadState", + "last_read_time": {"seconds": 751, "nanos": 543}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = reaction.CreateCustomEmojiRequest.meta.fields["custom_emoji"] + test_field = gc_space_read_state.UpdateSpaceReadStateRequest.meta.fields[ + "space_read_state" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -26958,7 +32030,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["custom_emoji"].items(): # pragma: NO COVER + for field, value in request_init["space_read_state"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -26988,20 +32060,17 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["custom_emoji"][field])): - del request_init["custom_emoji"][field][i][subfield] + for i in range(0, len(request_init["space_read_state"][field])): + del request_init["space_read_state"][field][i][subfield] else: - del request_init["custom_emoji"][field][subfield] + del request_init["space_read_state"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reaction.CustomEmoji( + return_value = gc_space_read_state.SpaceReadState( name="name_value", - uid="uid_value", - emoji_name="emoji_name_value", - temporary_image_uri="temporary_image_uri_value", ) # Wrap the value into a proper Response obj @@ -27009,23 +32078,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) + return_value = gc_space_read_state.SpaceReadState.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_custom_emoji(request) + response = client.update_space_read_state(request) # Establish that the response is the type that we expect. - assert isinstance(response, reaction.CustomEmoji) + assert isinstance(response, gc_space_read_state.SpaceReadState) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.emoji_name == "emoji_name_value" - assert response.temporary_image_uri == "temporary_image_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_custom_emoji_rest_interceptors(null_interceptor): +def test_update_space_read_state_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27038,21 +32104,21 @@ def test_create_custom_emoji_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_create_custom_emoji" + transports.ChatServiceRestInterceptor, "post_update_space_read_state" ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - 
"post_create_custom_emoji_with_metadata", + "post_update_space_read_state_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_create_custom_emoji" + transports.ChatServiceRestInterceptor, "pre_update_space_read_state" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reaction.CreateCustomEmojiRequest.pb( - reaction.CreateCustomEmojiRequest() + pb_message = gc_space_read_state.UpdateSpaceReadStateRequest.pb( + gc_space_read_state.UpdateSpaceReadStateRequest() ) transcode.return_value = { "method": "post", @@ -27064,19 +32130,21 @@ def test_create_custom_emoji_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reaction.CustomEmoji.to_json(reaction.CustomEmoji()) + return_value = gc_space_read_state.SpaceReadState.to_json( + gc_space_read_state.SpaceReadState() + ) req.return_value.content = return_value - request = reaction.CreateCustomEmojiRequest() + request = gc_space_read_state.UpdateSpaceReadStateRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reaction.CustomEmoji() - post_with_metadata.return_value = reaction.CustomEmoji(), metadata + post.return_value = gc_space_read_state.SpaceReadState() + post_with_metadata.return_value = gc_space_read_state.SpaceReadState(), metadata - client.create_custom_emoji( + client.update_space_read_state( request, metadata=[ ("key", "val"), @@ -27089,12 +32157,16 @@ def test_create_custom_emoji_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_custom_emoji_rest_bad_request(request_type=reaction.GetCustomEmojiRequest): +def test_get_thread_read_state_rest_bad_request( + request_type=thread_read_state.GetThreadReadStateRequest, +): client = ChatServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "customEmojis/sample1"} + request_init = { + "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27110,33 +32182,166 @@ def test_get_custom_emoji_rest_bad_request(request_type=reaction.GetCustomEmojiR response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_custom_emoji(request) + client.get_thread_read_state(request) @pytest.mark.parametrize( "request_type", [ - reaction.GetCustomEmojiRequest, + thread_read_state.GetThreadReadStateRequest, dict, ], ) -def test_get_custom_emoji_rest_call_success(request_type): +def test_get_thread_read_state_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "customEmojis/sample1"} + request_init = { + "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = thread_read_state.ThreadReadState( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = thread_read_state.ThreadReadState.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_thread_read_state(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, thread_read_state.ThreadReadState) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_thread_read_state_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_thread_read_state" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_get_thread_read_state_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_get_thread_read_state" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = thread_read_state.GetThreadReadStateRequest.pb( + thread_read_state.GetThreadReadStateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + 
req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = thread_read_state.ThreadReadState.to_json( + thread_read_state.ThreadReadState() + ) + req.return_value.content = return_value + + request = thread_read_state.GetThreadReadStateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = thread_read_state.ThreadReadState() + post_with_metadata.return_value = thread_read_state.ThreadReadState(), metadata + + client.get_thread_read_state( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_space_event_rest_bad_request( + request_type=space_event.GetSpaceEventRequest, +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/spaceEvents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_space_event(request) + + +@pytest.mark.parametrize( + "request_type", + [ + space_event.GetSpaceEventRequest, + dict, + ], +) +def test_get_space_event_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1/spaceEvents/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reaction.CustomEmoji( + return_value = space_event.SpaceEvent( name="name_value", - uid="uid_value", - emoji_name="emoji_name_value", - temporary_image_uri="temporary_image_uri_value", + event_type="event_type_value", ) # Wrap the value into a proper Response obj @@ -27144,23 +32349,21 @@ def test_get_custom_emoji_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.CustomEmoji.pb(return_value) + return_value = space_event.SpaceEvent.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_custom_emoji(request) + response = client.get_space_event(request) # Establish that the response is the type that we expect. - assert isinstance(response, reaction.CustomEmoji) + assert isinstance(response, space_event.SpaceEvent) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.emoji_name == "emoji_name_value" - assert response.temporary_image_uri == "temporary_image_uri_value" + assert response.event_type == "event_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_custom_emoji_rest_interceptors(null_interceptor): +def test_get_space_event_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27173,19 +32376,21 @@ def test_get_custom_emoji_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_custom_emoji" + transports.ChatServiceRestInterceptor, "post_get_space_event" ) as post, mock.patch.object( - 
transports.ChatServiceRestInterceptor, "post_get_custom_emoji_with_metadata" + transports.ChatServiceRestInterceptor, "post_get_space_event_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_custom_emoji" + transports.ChatServiceRestInterceptor, "pre_get_space_event" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reaction.GetCustomEmojiRequest.pb(reaction.GetCustomEmojiRequest()) + pb_message = space_event.GetSpaceEventRequest.pb( + space_event.GetSpaceEventRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -27196,19 +32401,19 @@ def test_get_custom_emoji_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reaction.CustomEmoji.to_json(reaction.CustomEmoji()) + return_value = space_event.SpaceEvent.to_json(space_event.SpaceEvent()) req.return_value.content = return_value - request = reaction.GetCustomEmojiRequest() + request = space_event.GetSpaceEventRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reaction.CustomEmoji() - post_with_metadata.return_value = reaction.CustomEmoji(), metadata + post.return_value = space_event.SpaceEvent() + post_with_metadata.return_value = space_event.SpaceEvent(), metadata - client.get_custom_emoji( + client.get_space_event( request, metadata=[ ("key", "val"), @@ -27221,14 +32426,14 @@ def test_get_custom_emoji_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_custom_emojis_rest_bad_request( - request_type=reaction.ListCustomEmojisRequest, +def test_list_space_events_rest_bad_request( + request_type=space_event.ListSpaceEventsRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27244,29 +32449,29 @@ def test_list_custom_emojis_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_custom_emojis(request) + client.list_space_events(request) @pytest.mark.parametrize( "request_type", [ - reaction.ListCustomEmojisRequest, + space_event.ListSpaceEventsRequest, dict, ], ) -def test_list_custom_emojis_rest_call_success(request_type): +def test_list_space_events_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reaction.ListCustomEmojisResponse( + return_value = space_event.ListSpaceEventsResponse( next_page_token="next_page_token_value", ) @@ -27275,20 +32480,20 @@ def test_list_custom_emojis_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reaction.ListCustomEmojisResponse.pb(return_value) + return_value = space_event.ListSpaceEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_custom_emojis(request) + response = client.list_space_events(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCustomEmojisPager) + assert isinstance(response, pagers.ListSpaceEventsPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_custom_emojis_rest_interceptors(null_interceptor): +def test_list_space_events_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27301,21 +32506,21 @@ def test_list_custom_emojis_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_custom_emojis" + transports.ChatServiceRestInterceptor, "post_list_space_events" ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - "post_list_custom_emojis_with_metadata", + "post_list_space_events_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_custom_emojis" + transports.ChatServiceRestInterceptor, 
"pre_list_space_events" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reaction.ListCustomEmojisRequest.pb( - reaction.ListCustomEmojisRequest() + pb_message = space_event.ListSpaceEventsRequest.pb( + space_event.ListSpaceEventsRequest() ) transcode.return_value = { "method": "post", @@ -27327,21 +32532,24 @@ def test_list_custom_emojis_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reaction.ListCustomEmojisResponse.to_json( - reaction.ListCustomEmojisResponse() + return_value = space_event.ListSpaceEventsResponse.to_json( + space_event.ListSpaceEventsResponse() ) req.return_value.content = return_value - request = reaction.ListCustomEmojisRequest() + request = space_event.ListSpaceEventsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reaction.ListCustomEmojisResponse() - post_with_metadata.return_value = reaction.ListCustomEmojisResponse(), metadata + post.return_value = space_event.ListSpaceEventsResponse() + post_with_metadata.return_value = ( + space_event.ListSpaceEventsResponse(), + metadata, + ) - client.list_custom_emojis( + client.list_space_events( request, metadata=[ ("key", "val"), @@ -27354,14 +32562,14 @@ def test_list_custom_emojis_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_custom_emoji_rest_bad_request( - request_type=reaction.DeleteCustomEmojiRequest, +def test_get_space_notification_setting_rest_bad_request( + request_type=space_notification_setting.GetSpaceNotificationSettingRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "customEmojis/sample1"} + request_init = {"name": 
"users/sample1/spaces/sample2/spaceNotificationSetting"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27377,45 +32585,63 @@ def test_delete_custom_emoji_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_custom_emoji(request) + client.get_space_notification_setting(request) @pytest.mark.parametrize( "request_type", [ - reaction.DeleteCustomEmojiRequest, + space_notification_setting.GetSpaceNotificationSettingRequest, dict, ], ) -def test_delete_custom_emoji_rest_call_success(request_type): +def test_get_space_notification_setting_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "customEmojis/sample1"} + request_init = {"name": "users/sample1/spaces/sample2/spaceNotificationSetting"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = space_notification_setting.SpaceNotificationSetting( + name="name_value", + notification_setting=space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, + mute_setting=space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = space_notification_setting.SpaceNotificationSetting.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_custom_emoji(request) + response = client.get_space_notification_setting(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, space_notification_setting.SpaceNotificationSetting) + assert response.name == "name_value" + assert ( + response.notification_setting + == space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL + ) + assert ( + response.mute_setting + == space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_custom_emoji_rest_interceptors(null_interceptor): +def test_get_space_notification_setting_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27428,12 +32654,21 @@ def test_delete_custom_emoji_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_delete_custom_emoji" + 
transports.ChatServiceRestInterceptor, "post_get_space_notification_setting" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_get_space_notification_setting_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_get_space_notification_setting" ) as pre, ): pre.assert_not_called() - pb_message = reaction.DeleteCustomEmojiRequest.pb( - reaction.DeleteCustomEmojiRequest() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = space_notification_setting.GetSpaceNotificationSettingRequest.pb( + space_notification_setting.GetSpaceNotificationSettingRequest() ) transcode.return_value = { "method": "post", @@ -27445,15 +32680,24 @@ def test_delete_custom_emoji_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = space_notification_setting.SpaceNotificationSetting.to_json( + space_notification_setting.SpaceNotificationSetting() + ) + req.return_value.content = return_value - request = reaction.DeleteCustomEmojiRequest() + request = space_notification_setting.GetSpaceNotificationSettingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = space_notification_setting.SpaceNotificationSetting() + post_with_metadata.return_value = ( + space_notification_setting.SpaceNotificationSetting(), + metadata, + ) - client.delete_custom_emoji( + client.get_space_notification_setting( request, metadata=[ ("key", "val"), @@ -27462,16 +32706,22 @@ def test_delete_custom_emoji_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_space_read_state_rest_bad_request( - request_type=space_read_state.GetSpaceReadStateRequest, +def 
test_update_space_notification_setting_rest_bad_request( + request_type=gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "users/sample1/spaces/sample2/spaceReadState"} + request_init = { + "space_notification_setting": { + "name": "users/sample1/spaces/sample2/spaceNotificationSetting" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27487,30 +32737,116 @@ def test_get_space_read_state_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_read_state(request) + client.update_space_notification_setting(request) @pytest.mark.parametrize( "request_type", [ - space_read_state.GetSpaceReadStateRequest, + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, dict, ], ) -def test_get_space_read_state_rest_call_success(request_type): +def test_update_space_notification_setting_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "users/sample1/spaces/sample2/spaceReadState"} + request_init = { + "space_notification_setting": { + "name": "users/sample1/spaces/sample2/spaceNotificationSetting" + } + } + request_init["space_notification_setting"] = { + "name": "users/sample1/spaces/sample2/spaceNotificationSetting", + "notification_setting": 1, + "mute_setting": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest.meta.fields[ + "space_notification_setting" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "space_notification_setting" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields 
from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["space_notification_setting"][field]) + ): + del request_init["space_notification_setting"][field][i][subfield] + else: + del request_init["space_notification_setting"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space_read_state.SpaceReadState( + return_value = gc_space_notification_setting.SpaceNotificationSetting( name="name_value", + notification_setting=gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, + mute_setting=gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, ) # Wrap the value into a proper Response obj @@ -27518,20 +32854,30 @@ def test_get_space_read_state_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_read_state.SpaceReadState.pb(return_value) + return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_read_state(request) + response = client.update_space_notification_setting(request) # Establish that the response is the 
type that we expect. - assert isinstance(response, space_read_state.SpaceReadState) + assert isinstance(response, gc_space_notification_setting.SpaceNotificationSetting) assert response.name == "name_value" + assert ( + response.notification_setting + == gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL + ) + assert ( + response.mute_setting + == gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_space_read_state_rest_interceptors(null_interceptor): +def test_update_space_notification_setting_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27544,21 +32890,25 @@ def test_get_space_read_state_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space_read_state" + transports.ChatServiceRestInterceptor, + "post_update_space_notification_setting", ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - "post_get_space_read_state_with_metadata", + "post_update_space_notification_setting_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_space_read_state" + transports.ChatServiceRestInterceptor, + "pre_update_space_notification_setting", ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space_read_state.GetSpaceReadStateRequest.pb( - space_read_state.GetSpaceReadStateRequest() + pb_message = ( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest.pb( + gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + ) ) transcode.return_value = { "method": "post", @@ -27570,21 +32920,24 @@ def 
test_get_space_read_state_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space_read_state.SpaceReadState.to_json( - space_read_state.SpaceReadState() + return_value = gc_space_notification_setting.SpaceNotificationSetting.to_json( + gc_space_notification_setting.SpaceNotificationSetting() ) req.return_value.content = return_value - request = space_read_state.GetSpaceReadStateRequest() + request = gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space_read_state.SpaceReadState() - post_with_metadata.return_value = space_read_state.SpaceReadState(), metadata + post.return_value = gc_space_notification_setting.SpaceNotificationSetting() + post_with_metadata.return_value = ( + gc_space_notification_setting.SpaceNotificationSetting(), + metadata, + ) - client.get_space_read_state( + client.update_space_notification_setting( request, metadata=[ ("key", "val"), @@ -27597,16 +32950,12 @@ def test_get_space_read_state_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_space_read_state_rest_bad_request( - request_type=gc_space_read_state.UpdateSpaceReadStateRequest, -): +def test_create_section_rest_bad_request(request_type=gc_section.CreateSectionRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} - } + request_init = {"parent": "users/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -27622,37 +32971,35 @@ def test_update_space_read_state_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space_read_state(request) + client.create_section(request) @pytest.mark.parametrize( "request_type", [ - gc_space_read_state.UpdateSpaceReadStateRequest, + gc_section.CreateSectionRequest, dict, ], ) -def test_update_space_read_state_rest_call_success(request_type): +def test_create_section_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "space_read_state": {"name": "users/sample1/spaces/sample2/spaceReadState"} - } - request_init["space_read_state"] = { - "name": "users/sample1/spaces/sample2/spaceReadState", - "last_read_time": {"seconds": 751, "nanos": 543}, + request_init = {"parent": "users/sample1"} + request_init["section"] = { + "name": "name_value", + "display_name": "display_name_value", + "sort_order": 1091, + "type_": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gc_space_read_state.UpdateSpaceReadStateRequest.meta.fields[ - "space_read_state" - ] + test_field = gc_section.CreateSectionRequest.meta.fields["section"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -27680,7 +33027,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["space_read_state"].items(): # pragma: NO COVER + for field, value in request_init["section"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -27710,17 +33057,20 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["space_read_state"][field])): - del request_init["space_read_state"][field][i][subfield] + for i in range(0, len(request_init["section"][field])): + del request_init["section"][field][i][subfield] else: - del request_init["space_read_state"][field][subfield] + del request_init["section"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gc_space_read_state.SpaceReadState( + return_value = gc_section.Section( name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, ) # Wrap the value into a proper Response obj @@ -27728,20 +33078,23 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space_read_state.SpaceReadState.pb(return_value) + return_value = gc_section.Section.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space_read_state(request) + response = client.create_section(request) # Establish that the response is the type that we expect. - assert isinstance(response, gc_space_read_state.SpaceReadState) + assert isinstance(response, gc_section.Section) assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_space_read_state_rest_interceptors(null_interceptor): +def test_create_section_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27754,21 +33107,20 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_update_space_read_state" + transports.ChatServiceRestInterceptor, "post_create_section" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, - 
"post_update_space_read_state_with_metadata", + transports.ChatServiceRestInterceptor, "post_create_section_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_update_space_read_state" + transports.ChatServiceRestInterceptor, "pre_create_section" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gc_space_read_state.UpdateSpaceReadStateRequest.pb( - gc_space_read_state.UpdateSpaceReadStateRequest() + pb_message = gc_section.CreateSectionRequest.pb( + gc_section.CreateSectionRequest() ) transcode.return_value = { "method": "post", @@ -27780,21 +33132,19 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_space_read_state.SpaceReadState.to_json( - gc_space_read_state.SpaceReadState() - ) + return_value = gc_section.Section.to_json(gc_section.Section()) req.return_value.content = return_value - request = gc_space_read_state.UpdateSpaceReadStateRequest() + request = gc_section.CreateSectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_space_read_state.SpaceReadState() - post_with_metadata.return_value = gc_space_read_state.SpaceReadState(), metadata + post.return_value = gc_section.Section() + post_with_metadata.return_value = gc_section.Section(), metadata - client.update_space_read_state( + client.create_section( request, metadata=[ ("key", "val"), @@ -27807,57 +33157,233 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_thread_read_state_rest_bad_request( - request_type=thread_read_state.GetThreadReadStateRequest, -): +def test_delete_section_rest_bad_request(request_type=section.DeleteSectionRequest): client = 
ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" - } + request_init = {"name": "users/sample1/sections/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_section(request) + + +@pytest.mark.parametrize( + "request_type", + [ + section.DeleteSectionRequest, + dict, + ], +) +def test_delete_section_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "users/sample1/sections/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_section(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_section_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_delete_section" + ) as pre, + ): + pre.assert_not_called() + pb_message = section.DeleteSectionRequest.pb(section.DeleteSectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = section.DeleteSectionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_section( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_update_section_rest_bad_request(request_type=gc_section.UpdateSectionRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"section": {"name": "users/sample1/sections/sample2"}} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_thread_read_state(request) + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_section(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gc_section.UpdateSectionRequest, + dict, + ], +) +def test_update_section_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"section": {"name": "users/sample1/sections/sample2"}} + request_init["section"] = { + "name": "users/sample1/sections/sample2", + "display_name": "display_name_value", + "sort_order": 1091, + "type_": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gc_section.UpdateSectionRequest.meta.fields["section"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["section"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - thread_read_state.GetThreadReadStateRequest, - dict, - ], -) -def test_get_thread_read_state_rest_call_success(request_type): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if 
(field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "name": "users/sample1/spaces/sample2/threads/sample3/threadReadState" - } + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["section"][field])): + del request_init["section"][field][i][subfield] + else: + del request_init["section"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = thread_read_state.ThreadReadState( + return_value = gc_section.Section( name="name_value", + display_name="display_name_value", + sort_order=1091, + type_=gc_section.Section.SectionType.CUSTOM_SECTION, ) # Wrap the value into a proper Response obj @@ -27865,20 +33391,23 @@ def test_get_thread_read_state_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = thread_read_state.ThreadReadState.pb(return_value) + return_value = gc_section.Section.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_thread_read_state(request) + response = client.update_section(request) # Establish that the response is the type that we expect. - assert isinstance(response, thread_read_state.ThreadReadState) + assert isinstance(response, gc_section.Section) assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.sort_order == 1091 + assert response.type_ == gc_section.Section.SectionType.CUSTOM_SECTION @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_thread_read_state_rest_interceptors(null_interceptor): +def test_update_section_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -27891,21 +33420,20 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_thread_read_state" + transports.ChatServiceRestInterceptor, "post_update_section" ) as post, mock.patch.object( - 
transports.ChatServiceRestInterceptor, - "post_get_thread_read_state_with_metadata", + transports.ChatServiceRestInterceptor, "post_update_section_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_thread_read_state" + transports.ChatServiceRestInterceptor, "pre_update_section" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = thread_read_state.GetThreadReadStateRequest.pb( - thread_read_state.GetThreadReadStateRequest() + pb_message = gc_section.UpdateSectionRequest.pb( + gc_section.UpdateSectionRequest() ) transcode.return_value = { "method": "post", @@ -27917,21 +33445,19 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = thread_read_state.ThreadReadState.to_json( - thread_read_state.ThreadReadState() - ) + return_value = gc_section.Section.to_json(gc_section.Section()) req.return_value.content = return_value - request = thread_read_state.GetThreadReadStateRequest() + request = gc_section.UpdateSectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = thread_read_state.ThreadReadState() - post_with_metadata.return_value = thread_read_state.ThreadReadState(), metadata + post.return_value = gc_section.Section() + post_with_metadata.return_value = gc_section.Section(), metadata - client.get_thread_read_state( + client.update_section( request, metadata=[ ("key", "val"), @@ -27944,14 +33470,12 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_space_event_rest_bad_request( - request_type=space_event.GetSpaceEventRequest, -): +def test_list_sections_rest_bad_request(request_type=section.ListSectionsRequest): client = 
ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/spaceEvents/sample2"} + request_init = {"parent": "users/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -27967,31 +33491,30 @@ def test_get_space_event_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_event(request) + client.list_sections(request) @pytest.mark.parametrize( "request_type", [ - space_event.GetSpaceEventRequest, + section.ListSectionsRequest, dict, ], ) -def test_get_space_event_rest_call_success(request_type): +def test_list_sections_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1/spaceEvents/sample2"} + request_init = {"parent": "users/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space_event.SpaceEvent( - name="name_value", - event_type="event_type_value", + return_value = section.ListSectionsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -27999,21 +33522,20 @@ def test_get_space_event_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_event.SpaceEvent.pb(return_value) + return_value = section.ListSectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_event(request) + response = client.list_sections(request) # Establish that the response is the type that we expect. - assert isinstance(response, space_event.SpaceEvent) - assert response.name == "name_value" - assert response.event_type == "event_type_value" + assert isinstance(response, pagers.ListSectionsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_space_event_rest_interceptors(null_interceptor): +def test_list_sections_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28026,21 +33548,19 @@ def test_get_space_event_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space_event" + transports.ChatServiceRestInterceptor, "post_list_sections" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space_event_with_metadata" + transports.ChatServiceRestInterceptor, "post_list_sections_with_metadata" ) as 
post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_space_event" + transports.ChatServiceRestInterceptor, "pre_list_sections" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space_event.GetSpaceEventRequest.pb( - space_event.GetSpaceEventRequest() - ) + pb_message = section.ListSectionsRequest.pb(section.ListSectionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -28051,19 +33571,21 @@ def test_get_space_event_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space_event.SpaceEvent.to_json(space_event.SpaceEvent()) + return_value = section.ListSectionsResponse.to_json( + section.ListSectionsResponse() + ) req.return_value.content = return_value - request = space_event.GetSpaceEventRequest() + request = section.ListSectionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space_event.SpaceEvent() - post_with_metadata.return_value = space_event.SpaceEvent(), metadata + post.return_value = section.ListSectionsResponse() + post_with_metadata.return_value = section.ListSectionsResponse(), metadata - client.get_space_event( + client.list_sections( request, metadata=[ ("key", "val"), @@ -28076,14 +33598,12 @@ def test_get_space_event_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_space_events_rest_bad_request( - request_type=space_event.ListSpaceEventsRequest, -): +def test_position_section_rest_bad_request(request_type=section.PositionSectionRequest): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {"name": 
"users/sample1/sections/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -28099,51 +33619,48 @@ def test_list_space_events_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_space_events(request) + client.position_section(request) @pytest.mark.parametrize( "request_type", [ - space_event.ListSpaceEventsRequest, + section.PositionSectionRequest, dict, ], ) -def test_list_space_events_rest_call_success(request_type): +def test_position_section_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {"name": "users/sample1/sections/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = space_event.ListSpaceEventsResponse( - next_page_token="next_page_token_value", - ) + return_value = section.PositionSectionResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_event.ListSpaceEventsResponse.pb(return_value) + return_value = section.PositionSectionResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_space_events(request) + response = client.position_section(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpaceEventsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, section.PositionSectionResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_space_events_rest_interceptors(null_interceptor): +def test_position_section_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28156,22 +33673,19 @@ def test_list_space_events_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_space_events" + transports.ChatServiceRestInterceptor, "post_position_section" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_list_space_events_with_metadata", + transports.ChatServiceRestInterceptor, "post_position_section_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_space_events" + transports.ChatServiceRestInterceptor, "pre_position_section" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space_event.ListSpaceEventsRequest.pb( - space_event.ListSpaceEventsRequest() - ) + pb_message = section.PositionSectionRequest.pb(section.PositionSectionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -28182,24 +33696,21 @@ def test_list_space_events_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space_event.ListSpaceEventsResponse.to_json( - space_event.ListSpaceEventsResponse() + return_value = section.PositionSectionResponse.to_json( + section.PositionSectionResponse() ) 
req.return_value.content = return_value - request = space_event.ListSpaceEventsRequest() + request = section.PositionSectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space_event.ListSpaceEventsResponse() - post_with_metadata.return_value = ( - space_event.ListSpaceEventsResponse(), - metadata, - ) + post.return_value = section.PositionSectionResponse() + post_with_metadata.return_value = section.PositionSectionResponse(), metadata - client.list_space_events( + client.position_section( request, metadata=[ ("key", "val"), @@ -28212,14 +33723,14 @@ def test_list_space_events_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_space_notification_setting_rest_bad_request( - request_type=space_notification_setting.GetSpaceNotificationSettingRequest, +def test_list_section_items_rest_bad_request( + request_type=section.ListSectionItemsRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "users/sample1/spaces/sample2/spaceNotificationSetting"} + request_init = {"parent": "users/sample1/sections/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -28235,32 +33746,30 @@ def test_get_space_notification_setting_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space_notification_setting(request) + client.list_section_items(request) @pytest.mark.parametrize( "request_type", [ - space_notification_setting.GetSpaceNotificationSettingRequest, + section.ListSectionItemsRequest, dict, ], ) -def test_get_space_notification_setting_rest_call_success(request_type): +def test_list_section_items_rest_call_success(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "users/sample1/spaces/sample2/spaceNotificationSetting"} + request_init = {"parent": "users/sample1/sections/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space_notification_setting.SpaceNotificationSetting( - name="name_value", - notification_setting=space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, - mute_setting=space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, + return_value = section.ListSectionItemsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -28268,30 +33777,20 @@ def test_get_space_notification_setting_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) + return_value = section.ListSectionItemsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space_notification_setting(request) + response = client.list_section_items(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, space_notification_setting.SpaceNotificationSetting) - assert response.name == "name_value" - assert ( - response.notification_setting - == space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL - ) - assert ( - response.mute_setting - == space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED - ) + assert isinstance(response, pagers.ListSectionItemsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_space_notification_setting_rest_interceptors(null_interceptor): +def test_list_section_items_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28304,21 +33803,21 @@ def test_get_space_notification_setting_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_get_space_notification_setting" + transports.ChatServiceRestInterceptor, "post_list_section_items" ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - "post_get_space_notification_setting_with_metadata", + "post_list_section_items_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_get_space_notification_setting" + transports.ChatServiceRestInterceptor, "pre_list_section_items" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = space_notification_setting.GetSpaceNotificationSettingRequest.pb( - space_notification_setting.GetSpaceNotificationSettingRequest() + pb_message = section.ListSectionItemsRequest.pb( + section.ListSectionItemsRequest() ) transcode.return_value = { "method": "post", @@ -28330,24 +33829,21 @@ def 
test_get_space_notification_setting_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = space_notification_setting.SpaceNotificationSetting.to_json( - space_notification_setting.SpaceNotificationSetting() + return_value = section.ListSectionItemsResponse.to_json( + section.ListSectionItemsResponse() ) req.return_value.content = return_value - request = space_notification_setting.GetSpaceNotificationSettingRequest() + request = section.ListSectionItemsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space_notification_setting.SpaceNotificationSetting() - post_with_metadata.return_value = ( - space_notification_setting.SpaceNotificationSetting(), - metadata, - ) + post.return_value = section.ListSectionItemsResponse() + post_with_metadata.return_value = section.ListSectionItemsResponse(), metadata - client.get_space_notification_setting( + client.list_section_items( request, metadata=[ ("key", "val"), @@ -28360,18 +33856,14 @@ def test_get_space_notification_setting_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_space_notification_setting_rest_bad_request( - request_type=gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, +def test_move_section_item_rest_bad_request( + request_type=section.MoveSectionItemRequest, ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "space_notification_setting": { - "name": "users/sample1/spaces/sample2/spaceNotificationSetting" - } - } + request_init = {"name": "users/sample1/sections/sample2/items/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -28386,148 +33878,49 @@ def test_update_space_notification_setting_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space_notification_setting(request) - - -@pytest.mark.parametrize( - "request_type", - [ - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest, - dict, - ], -) -def test_update_space_notification_setting_rest_call_success(request_type): - client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "space_notification_setting": { - "name": "users/sample1/spaces/sample2/spaceNotificationSetting" - } - } - request_init["space_notification_setting"] = { - "name": "users/sample1/spaces/sample2/spaceNotificationSetting", - "notification_setting": 1, - "mute_setting": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = ( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest.meta.fields[ - "space_notification_setting" - ] - ) - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.move_section_item(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "space_notification_setting" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + section.MoveSectionItemRequest, + dict, + ], +) +def test_move_section_item_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime 
- for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["space_notification_setting"][field]) - ): - del request_init["space_notification_setting"][field][i][subfield] - else: - del request_init["space_notification_setting"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"name": "users/sample1/sections/sample2/items/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gc_space_notification_setting.SpaceNotificationSetting( - name="name_value", - notification_setting=gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL, - mute_setting=gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED, - ) + return_value = section.MoveSectionItemResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gc_space_notification_setting.SpaceNotificationSetting.pb( - return_value - ) + return_value = section.MoveSectionItemResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space_notification_setting(request) + response = client.move_section_item(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gc_space_notification_setting.SpaceNotificationSetting) - assert response.name == "name_value" - assert ( - response.notification_setting - == gc_space_notification_setting.SpaceNotificationSetting.NotificationSetting.ALL - ) - assert ( - response.mute_setting - == gc_space_notification_setting.SpaceNotificationSetting.MuteSetting.UNMUTED - ) + assert isinstance(response, section.MoveSectionItemResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_space_notification_setting_rest_interceptors(null_interceptor): +def test_move_section_item_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -28540,26 +33933,20 @@ def test_update_space_notification_setting_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, - "post_update_space_notification_setting", + transports.ChatServiceRestInterceptor, "post_move_section_item" ) as post, mock.patch.object( transports.ChatServiceRestInterceptor, - "post_update_space_notification_setting_with_metadata", + "post_move_section_item_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ChatServiceRestInterceptor, - "pre_update_space_notification_setting", + transports.ChatServiceRestInterceptor, "pre_move_section_item" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = ( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest.pb( - gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() - ) - ) + pb_message = section.MoveSectionItemRequest.pb(section.MoveSectionItemRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -28570,24 +33957,21 @@ def 
test_update_space_notification_setting_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gc_space_notification_setting.SpaceNotificationSetting.to_json( - gc_space_notification_setting.SpaceNotificationSetting() + return_value = section.MoveSectionItemResponse.to_json( + section.MoveSectionItemResponse() ) req.return_value.content = return_value - request = gc_space_notification_setting.UpdateSpaceNotificationSettingRequest() + request = section.MoveSectionItemRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gc_space_notification_setting.SpaceNotificationSetting() - post_with_metadata.return_value = ( - gc_space_notification_setting.SpaceNotificationSetting(), - metadata, - ) + post.return_value = section.MoveSectionItemResponse() + post_with_metadata.return_value = section.MoveSectionItemResponse(), metadata - client.update_space_notification_setting( + client.move_section_item( request, metadata=[ ("key", "val"), @@ -29339,6 +34723,150 @@ def test_update_space_notification_setting_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_section_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_section), "__call__") as call: + client.create_section(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.CreateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_section_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_section), "__call__") as call: + client.delete_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.DeleteSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_section_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_section), "__call__") as call: + client.update_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gc_section.UpdateSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sections_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_sections), "__call__") as call: + client.list_sections(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_position_section_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.position_section), "__call__") as call: + client.position_section(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.PositionSectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_section_items_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_section_items), "__call__" + ) as call: + client.list_section_items(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.ListSectionItemsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_section_item_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.move_section_item), "__call__" + ) as call: + client.move_section_item(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = section.MoveSectionItemRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ChatServiceClient( @@ -29407,6 +34935,13 @@ def test_chat_service_base_transport(): "list_space_events", "get_space_notification_setting", "update_space_notification_setting", + "create_section", + "delete_section", + "update_section", + "list_sections", + "position_section", + "list_section_items", + "move_section_item", ) for method in methods: with pytest.raises(NotImplementedError): @@ -29451,9 +34986,11 @@ def test_chat_service_base_transport_with_credentials_file(): "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -29473,6 +35010,8 @@ def test_chat_service_base_transport_with_credentials_file(): "https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ), 
quota_project_id="octopus", @@ -29508,9 +35047,11 @@ def test_chat_service_auth_adc(): "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -29530,6 +35071,8 @@ def test_chat_service_auth_adc(): "https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ), quota_project_id=None, @@ -29559,9 +35102,11 @@ def test_chat_service_transport_auth_adc(transport_class): "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -29581,6 +35126,8 @@ def test_chat_service_transport_auth_adc(transport_class): 
"https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ), quota_project_id="octopus", @@ -29643,9 +35190,11 @@ def test_chat_service_transport_create_channel(transport_class, grpc_helpers): "https://www.googleapis.com/auth/chat.admin.spaces.readonly", "https://www.googleapis.com/auth/chat.app.delete", "https://www.googleapis.com/auth/chat.app.memberships", + "https://www.googleapis.com/auth/chat.app.memberships.readonly", "https://www.googleapis.com/auth/chat.app.messages.readonly", "https://www.googleapis.com/auth/chat.app.spaces", "https://www.googleapis.com/auth/chat.app.spaces.create", + "https://www.googleapis.com/auth/chat.app.spaces.readonly", "https://www.googleapis.com/auth/chat.bot", "https://www.googleapis.com/auth/chat.customemojis", "https://www.googleapis.com/auth/chat.customemojis.readonly", @@ -29665,6 +35214,8 @@ def test_chat_service_transport_create_channel(transport_class, grpc_helpers): "https://www.googleapis.com/auth/chat.spaces.readonly", "https://www.googleapis.com/auth/chat.users.readstate", "https://www.googleapis.com/auth/chat.users.readstate.readonly", + "https://www.googleapis.com/auth/chat.users.sections", + "https://www.googleapis.com/auth/chat.users.sections.readonly", "https://www.googleapis.com/auth/chat.users.spacesettings", ), scopes=["1", "2"], @@ -29896,6 +35447,27 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.update_space_notification_setting._session session2 = client2.transport.update_space_notification_setting._session assert session1 != session2 + session1 = client1.transport.create_section._session + session2 = client2.transport.create_section._session + assert 
session1 != session2 + session1 = client1.transport.delete_section._session + session2 = client2.transport.delete_section._session + assert session1 != session2 + session1 = client1.transport.update_section._session + session2 = client2.transport.update_section._session + assert session1 != session2 + session1 = client1.transport.list_sections._session + session2 = client2.transport.list_sections._session + assert session1 != session2 + session1 = client1.transport.position_section._session + session2 = client2.transport.position_section._session + assert session1 != session2 + session1 = client1.transport.list_section_items._session + session2 = client2.transport.list_section_items._session + assert session1 != session2 + session1 = client1.transport.move_section_item._session + session2 = client2.transport.move_section_item._session + assert session1 != session2 def test_chat_service_grpc_transport_channel(): @@ -30163,8 +35735,57 @@ def test_parse_reaction_path(): assert expected == actual +def test_section_path(): + user = "oyster" + section = "nudibranch" + expected = "users/{user}/sections/{section}".format( + user=user, + section=section, + ) + actual = ChatServiceClient.section_path(user, section) + assert expected == actual + + +def test_parse_section_path(): + expected = { + "user": "cuttlefish", + "section": "mussel", + } + path = ChatServiceClient.section_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ChatServiceClient.parse_section_path(path) + assert expected == actual + + +def test_section_item_path(): + user = "winkle" + section = "nautilus" + item = "scallop" + expected = "users/{user}/sections/{section}/items/{item}".format( + user=user, + section=section, + item=item, + ) + actual = ChatServiceClient.section_item_path(user, section, item) + assert expected == actual + + +def test_parse_section_item_path(): + expected = { + "user": "abalone", + "section": "squid", + "item": "clam", + } + path = ChatServiceClient.section_item_path(**expected) + + # Check that the path construction is reversible. + actual = ChatServiceClient.parse_section_item_path(path) + assert expected == actual + + def test_space_path(): - space = "oyster" + space = "whelk" expected = "spaces/{space}".format( space=space, ) @@ -30174,7 +35795,7 @@ def test_space_path(): def test_parse_space_path(): expected = { - "space": "nudibranch", + "space": "octopus", } path = ChatServiceClient.space_path(**expected) @@ -30184,8 +35805,8 @@ def test_parse_space_path(): def test_space_event_path(): - space = "cuttlefish" - space_event = "mussel" + space = "oyster" + space_event = "nudibranch" expected = "spaces/{space}/spaceEvents/{space_event}".format( space=space, space_event=space_event, @@ -30196,8 +35817,8 @@ def test_space_event_path(): def test_parse_space_event_path(): expected = { - "space": "winkle", - "space_event": "nautilus", + "space": "cuttlefish", + "space_event": "mussel", } path = ChatServiceClient.space_event_path(**expected) @@ -30207,8 +35828,8 @@ def test_parse_space_event_path(): def test_space_notification_setting_path(): - user = "scallop" - space = "abalone" + user = "winkle" + space = "nautilus" expected = "users/{user}/spaces/{space}/spaceNotificationSetting".format( user=user, space=space, @@ -30219,8 +35840,8 @@ def test_space_notification_setting_path(): def test_parse_space_notification_setting_path(): expected = { - "user": "squid", - "space": "clam", + 
"user": "scallop", + "space": "abalone", } path = ChatServiceClient.space_notification_setting_path(**expected) @@ -30230,8 +35851,8 @@ def test_parse_space_notification_setting_path(): def test_space_read_state_path(): - user = "whelk" - space = "octopus" + user = "squid" + space = "clam" expected = "users/{user}/spaces/{space}/spaceReadState".format( user=user, space=space, @@ -30242,8 +35863,8 @@ def test_space_read_state_path(): def test_parse_space_read_state_path(): expected = { - "user": "oyster", - "space": "nudibranch", + "user": "whelk", + "space": "octopus", } path = ChatServiceClient.space_read_state_path(**expected) @@ -30253,8 +35874,8 @@ def test_parse_space_read_state_path(): def test_thread_path(): - space = "cuttlefish" - thread = "mussel" + space = "oyster" + thread = "nudibranch" expected = "spaces/{space}/threads/{thread}".format( space=space, thread=thread, @@ -30265,8 +35886,8 @@ def test_thread_path(): def test_parse_thread_path(): expected = { - "space": "winkle", - "thread": "nautilus", + "space": "cuttlefish", + "thread": "mussel", } path = ChatServiceClient.thread_path(**expected) @@ -30276,9 +35897,9 @@ def test_parse_thread_path(): def test_thread_read_state_path(): - user = "scallop" - space = "abalone" - thread = "squid" + user = "winkle" + space = "nautilus" + thread = "scallop" expected = "users/{user}/spaces/{space}/threads/{thread}/threadReadState".format( user=user, space=space, @@ -30290,9 +35911,9 @@ def test_thread_read_state_path(): def test_parse_thread_read_state_path(): expected = { - "user": "clam", - "space": "whelk", - "thread": "octopus", + "user": "abalone", + "space": "squid", + "thread": "clam", } path = ChatServiceClient.thread_read_state_path(**expected) @@ -30301,6 +35922,26 @@ def test_parse_thread_read_state_path(): assert expected == actual +def test_user_path(): + user = "whelk" + expected = "users/{user}".format( + user=user, + ) + actual = ChatServiceClient.user_path(user) + assert expected == actual + + 
+def test_parse_user_path(): + expected = { + "user": "octopus", + } + path = ChatServiceClient.user_path(**expected) + + # Check that the path construction is reversible. + actual = ChatServiceClient.parse_user_path(path) + assert expected == actual + + def test_common_billing_account_path(): billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( diff --git a/packages/google-apps-events-subscriptions/.repo-metadata.json b/packages/google-apps-events-subscriptions/.repo-metadata.json index 2fa1bd25e8f5..34f9205b3f87 100644 --- a/packages/google-apps-events-subscriptions/.repo-metadata.json +++ b/packages/google-apps-events-subscriptions/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Google Workspace Events API lets you subscribe to events and manage change notifications across Google Workspace applications.", - "api_id": "subscriptions.googleapis.com", - "api_shortname": "subscriptions", - "client_documentation": "https://googleapis.dev/python/google-apps-events-subscriptions/latest", - "default_version": "v1", - "distribution_name": "google-apps-events-subscriptions", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-apps-events-subscriptions", - "name_pretty": "Google Workspace Events API", - "product_documentation": "https://developers.google.com/workspace/events", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google Workspace Events API lets you subscribe to events and manage change notifications across Google Workspace applications.", + "api_id": "subscriptions.googleapis.com", + "api_shortname": "subscriptions", + "client_documentation": "https://googleapis.dev/python/google-apps-events-subscriptions/latest", + "default_version": "v1", + "distribution_name": "google-apps-events-subscriptions", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-apps-events-subscriptions", + "name_pretty": "Google Workspace Events API", + "product_documentation": "https://developers.google.com/workspace/events", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-apps-meet/.repo-metadata.json b/packages/google-apps-meet/.repo-metadata.json index 51a46cac2d13..c172a9a33528 100644 --- a/packages/google-apps-meet/.repo-metadata.json +++ b/packages/google-apps-meet/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Create and manage meetings in Google Meet.", - "api_id": "meet.googleapis.com", - "api_shortname": "meet", - "client_documentation": "https://googleapis.dev/python/google-apps-meet/latest", - "default_version": "v2", - "distribution_name": "google-apps-meet", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1216362&template=1766418", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-apps-meet", - "name_pretty": "Google Meet API", - "product_documentation": "https://developers.google.com/meet/api/guides/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Create and manage meetings in Google Meet.", + "api_id": "meet.googleapis.com", + "api_shortname": "meet", + "client_documentation": "https://googleapis.dev/python/google-apps-meet/latest", + "default_version": "v2", + "distribution_name": "google-apps-meet", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1216362\u0026template=1766418", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-apps-meet", + "name_pretty": "Google Meet API", + "product_documentation": "https://developers.google.com/meet/api/guides/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No 
newline at end of file diff --git a/packages/google-apps-script-type/.repo-metadata.json b/packages/google-apps-script-type/.repo-metadata.json index 26e0895b7452..a7876dfadbc7 100644 --- a/packages/google-apps-script-type/.repo-metadata.json +++ b/packages/google-apps-script-type/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "type.googleapis.com", - "api_shortname": "type", - "client_documentation": "https://googleapis.dev/python/type/latest", - "default_version": "apiVersion", - "distribution_name": "google-apps-script-type", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "type", - "name_pretty": "Google Apps Script Type Protos", - "product_documentation": "https://developers.google.com/apps-script/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "type.googleapis.com", + "api_shortname": "type", + "client_documentation": "https://googleapis.dev/python/type/latest", + "default_version": "apiVersion", + "distribution_name": "google-apps-script-type", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "type", + "name_pretty": "Google Apps Script Type Protos", + "product_documentation": "https://developers.google.com/apps-script/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-area120-tables/.repo-metadata.json b/packages/google-area120-tables/.repo-metadata.json index f0ffeaf75062..401f6f9adfaf 100644 --- a/packages/google-area120-tables/.repo-metadata.json +++ b/packages/google-area120-tables/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides programmatic methods to the Area 120 Tables API.", - "api_id": "area120tables.googleapis.com", - "api_shortname": "area120tables", - "client_documentation": 
"https://googleapis.dev/python/area120tables/latest", - "default_version": "v1alpha1", - "distribution_name": "google-area120-tables", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "area120tables", - "name_pretty": "Area 120 Tables", - "product_documentation": "https://area120.google.com", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "provides programmatic methods to the Area 120 Tables API.", + "api_id": "area120tables.googleapis.com", + "api_shortname": "area120tables", + "client_documentation": "https://googleapis.dev/python/area120tables/latest", + "default_version": "v1alpha1", + "distribution_name": "google-area120-tables", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "area120tables", + "name_pretty": "Area 120 Tables", + "product_documentation": "https://area120.google.com", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-auth-httplib2/.repo-metadata.json b/packages/google-auth-httplib2/.repo-metadata.json index 476cf8d11499..6de607dccf8b 100644 --- a/packages/google-auth-httplib2/.repo-metadata.json +++ b/packages/google-auth-httplib2/.repo-metadata.json @@ -1,13 +1,11 @@ { - "name": "google-auth-httplib2", - "name_pretty": "Google Auth httplib2", "client_documentation": "https://googleapis.dev/python/google-auth-httplib2/latest", + "distribution_name": "google-auth-httplib2", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "preview", "language": "python", "library_type": "AUTH", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-auth-httplib2", - "default_version": "", - "codeowner_team": "@googleapis/googleapis-auth" -} + "name": "google-auth-httplib2", + "name_pretty": "Google Auth httplib2", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of 
file diff --git a/packages/google-auth-httplib2/docs/README.rst b/packages/google-auth-httplib2/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-auth-httplib2/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-auth-httplib2/docs/README.rst b/packages/google-auth-httplib2/docs/README.rst new file mode 100644 index 000000000000..2b7af77f51cd --- /dev/null +++ b/packages/google-auth-httplib2/docs/README.rst @@ -0,0 +1,49 @@ +``httplib2`` Transport for Google Auth +====================================== + +.. image:: https://img.shields.io/badge/status-deprecated-red.svg + :target: https://github.com/httplib2/httplib2 + +The library was created to help clients migrate from `oauth2client `_ to `google-auth`_, however this library is no longer maintained. For any new usages please see provided transport layers by `google-auth`_ library. + + +Documentation +------------- + +`httplib2 Transport for Google Auth Documentation`_ + + +Introduction +------------ +|pypi| + +This library provides an `httplib2`_ transport for `google-auth`_. + +.. note:: ``httplib`` has lots of problems such as lack of threadsafety + and insecure usage of TLS. Using it is highly discouraged. This + library is intended to help existing users of ``oauth2client`` migrate to + ``google-auth``. + +.. |pypi| image:: https://img.shields.io/pypi/v/google-auth-httplib2.svg + :target: https://pypi.python.org/pypi/google-auth-httplib2 + +.. _httplib2: https://github.com/httplib2/httplib2 +.. _httplib2 Transport for Google Auth Documentation: https://googleapis.dev/python/google-auth-httplib2/latest +.. _google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python/ + + +Installing +---------- + +You can install using `pip`_:: + + $ pip install google-auth-httplib2 + +.. _pip: https://pip.pypa.io/en/stable/ + +License +------- + +Apache 2.0 - See `the LICENSE`_ for more information. 
+ +.. _the LICENSE: https://github.com/GoogleCloudPlatform/google-auth-library-python/blob/main/LICENSE diff --git a/packages/google-auth-oauthlib/.repo-metadata.json b/packages/google-auth-oauthlib/.repo-metadata.json index 2fb50ea72866..f99f9c26ad93 100644 --- a/packages/google-auth-oauthlib/.repo-metadata.json +++ b/packages/google-auth-oauthlib/.repo-metadata.json @@ -1,13 +1,11 @@ { - "name": "google-auth-oauthlib", - "name_pretty": "Google Auth OAuthlib", "client_documentation": "https://googleapis.dev/python/google-auth-oauthlib/latest", + "distribution_name": "google-auth-oauthlib", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "stable", "language": "python", "library_type": "AUTH", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-auth-oauthlib", - "default_version": "", - "codeowner_team": "@googleapis/googleapis-auth" -} + "name": "google-auth-oauthlib", + "name_pretty": "Google Auth OAuthlib", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-auth/.repo-metadata.json b/packages/google-auth/.repo-metadata.json index f6a3d96a0c97..557f5e2ead91 100644 --- a/packages/google-auth/.repo-metadata.json +++ b/packages/google-auth/.repo-metadata.json @@ -1,11 +1,11 @@ { - "name": "google-auth", - "name_pretty": "Google Auth Python Library", "client_documentation": "https://googleapis.dev/python/google-auth/latest", + "distribution_name": "google-auth", "issue_tracker": "https://github.com/googleapis/google-auth-library-python/issues", - "release_level": "stable", "language": "python", "library_type": "AUTH", - "repo": "googleapis/google-auth-library-python", - "distribution_name": "google-auth" -} + "name": "google-auth", + "name_pretty": "Google Auth Python Library", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git 
a/packages/google-cloud-access-approval/.repo-metadata.json b/packages/google-cloud-access-approval/.repo-metadata.json index adf3a6c1e11e..0dfd02e4c19f 100644 --- a/packages/google-cloud-access-approval/.repo-metadata.json +++ b/packages/google-cloud-access-approval/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "enables controlling access to your organization's data by Google personnel.", - "api_id": "accessapproval.googleapis.com", - "api_shortname": "accessapproval", - "client_documentation": "https://cloud.google.com/python/docs/reference/accessapproval/latest", - "default_version": "v1", - "distribution_name": "google-cloud-access-approval", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "accessapproval", - "name_pretty": "Access Approval", - "product_documentation": "https://cloud.google.com/access-approval", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "enables controlling access to your organization's data by Google personnel.", + "api_id": "accessapproval.googleapis.com", + "api_shortname": "accessapproval", + "client_documentation": "https://cloud.google.com/python/docs/reference/accessapproval/latest", + "default_version": "v1", + "distribution_name": "google-cloud-access-approval", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "accessapproval", + "name_pretty": "Access Approval", + "product_documentation": "https://cloud.google.com/access-approval", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/.repo-metadata.json b/packages/google-cloud-access-context-manager/.repo-metadata.json index 7d593997abbe..76e618278328 100644 --- a/packages/google-cloud-access-context-manager/.repo-metadata.json +++ b/packages/google-cloud-access-context-manager/.repo-metadata.json @@ -1,14 +1,16 @@ { - "api_shortname": 
"accesscontextmanager", - "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager", - "default_version": "apiVersion", - "distribution_name": "google-cloud-access-context-manager", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "accesscontextmanager", - "name_pretty": "Access Context Manager", - "product_documentation": "https://cloud.google.com/access-context-manager/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "An API for setting attribute based access control to requests to GCP\nservices.", + "api_id": "accesscontextmanager.googleapis.com", + "api_shortname": "accesscontextmanager", + "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager", + "default_version": "apiVersion", + "distribution_name": "google-cloud-access-context-manager", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "accesscontextmanager", + "name_pretty": "Access Context Manager", + "product_documentation": "https://cloud.google.com/access-context-manager/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/README.rst b/packages/google-cloud-access-context-manager/README.rst index 2d820cad6eb9..db57cffa67c6 100644 --- a/packages/google-cloud-access-context-manager/README.rst +++ b/packages/google-cloud-access-context-manager/README.rst @@ -3,7 +3,8 @@ Python Client for Access Context Manager |preview| |pypi| |versions| -`Access Context Manager`_: +`Access Context Manager`_: An API for setting attribute based access control to requests to GCP +services. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-access-context-manager/docs/README.rst b/packages/google-cloud-access-context-manager/docs/README.rst index 2d820cad6eb9..db57cffa67c6 100644 --- a/packages/google-cloud-access-context-manager/docs/README.rst +++ b/packages/google-cloud-access-context-manager/docs/README.rst @@ -3,7 +3,8 @@ Python Client for Access Context Manager |preview| |pypi| |versions| -`Access Context Manager`_: +`Access Context Manager`_: An API for setting attribute based access control to requests to GCP +services. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py index accc75c92063..7d241b53784f 100644 --- a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py @@ -35,6 +35,11 @@ from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + from google.identity.accesscontextmanager.v1 import ( access_level_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_access__level__pb2, ) @@ -47,10 +52,6 @@ from google.identity.accesscontextmanager.v1 import ( service_perimeter_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_service__perimeter__pb2, ) -from google.longrunning import ( - 
operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\nDgoogle/identity/accesscontextmanager/v1/access_context_manager.proto\x12\'google.identity.accesscontextmanager.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a:google/identity/accesscontextmanager/v1/access_level.proto\x1a;google/identity/accesscontextmanager/v1/access_policy.proto\x1a\x45google/identity/accesscontextmanager/v1/gcp_user_access_binding.proto\x1a?google/identity/accesscontextmanager/v1/service_perimeter.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto"\x8c\x01\n\x19ListAccessPoliciesRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x85\x01\n\x1aListAccessPoliciesResponse\x12N\n\x0f\x61\x63\x63\x65ss_policies\x18\x01 \x03(\x0b\x32\x35.google.identity.accesscontextmanager.v1.AccessPolicy\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"`\n\x16GetAccessPolicyRequest\x12\x46\n\x04name\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0accesscontextmanager.googleapis.com/AccessPolicy"\x9d\x01\n\x19UpdateAccessPolicyRequest\x12J\n\x06policy\x18\x01 \x01(\x0b\x32\x35.google.identity.accesscontextmanager.v1.AccessPolicyB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"c\n\x19\x44\x65leteAccessPolicyRequest\x12\x46\n\x04name\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0accesscontextmanager.googleapis.com/AccessPolicy"\xdc\x01\n\x17ListAccessLevelsRequest\x12G\n\x06parent\x18\x01 
\x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12Q\n\x13\x61\x63\x63\x65ss_level_format\x18\x04 \x01(\x0e\x32\x34.google.identity.accesscontextmanager.v1.LevelFormat"\x80\x01\n\x18ListAccessLevelsResponse\x12K\n\raccess_levels\x18\x01 \x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevel\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xb1\x01\n\x15GetAccessLevelRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/accesscontextmanager.googleapis.com/AccessLevel\x12Q\n\x13\x61\x63\x63\x65ss_level_format\x18\x02 \x01(\x0e\x32\x34.google.identity.accesscontextmanager.v1.LevelFormat"\xb4\x01\n\x18\x43reateAccessLevelRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12O\n\x0c\x61\x63\x63\x65ss_level\x18\x02 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02"\xa1\x01\n\x18UpdateAccessLevelRequest\x12O\n\x0c\x61\x63\x63\x65ss_level\x18\x01 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"a\n\x18\x44\x65leteAccessLevelRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/accesscontextmanager.googleapis.com/AccessLevel"\xc5\x01\n\x1aReplaceAccessLevelsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12P\n\raccess_levels\x18\x02 \x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x04 \x01(\t"j\n\x1bReplaceAccessLevelsResponse\x12K\n\raccess_levels\x18\x01 \x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevel"\x93\x01\n\x1cListServicePerimetersRequest\x12L\n\x06parent\x18\x01 
\x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x8f\x01\n\x1dListServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"h\n\x1aGetServicePerimeterRequest\x12J\n\x04name\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\n4accesscontextmanager.googleapis.com/ServicePerimeter"\xc8\x01\n\x1d\x43reateServicePerimeterRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12Y\n\x11service_perimeter\x18\x02 \x01(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02"\xb0\x01\n\x1dUpdateServicePerimeterRequest\x12Y\n\x11service_perimeter\x18\x01 \x01(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"k\n\x1d\x44\x65leteServicePerimeterRequest\x12J\n\x04name\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\n4accesscontextmanager.googleapis.com/ServicePerimeter"\xd9\x01\n\x1fReplaceServicePerimetersRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12Z\n\x12service_perimeters\x18\x02 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t"y\n ReplaceServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter"|\n\x1e\x43ommitServicePerimetersRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12\x0c\n\x04\x65tag\x18\x02 
\x01(\t"x\n\x1f\x43ommitServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter"\x9d\x01\n ListGcpUserAccessBindingsRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x9d\x01\n!ListGcpUserAccessBindingsResponse\x12_\n\x18gcp_user_access_bindings\x18\x01 \x03(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBinding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"p\n\x1eGetGcpUserAccessBindingRequest\x12N\n\x04name\x18\x01 \x01(\tB@\xe0\x41\x02\xfa\x41:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding"\xd2\x01\n!CreateGcpUserAccessBindingRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x63\n\x17gcp_user_access_binding\x18\x02 \x01(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBindingB\x03\xe0\x41\x02"\xbe\x01\n!UpdateGcpUserAccessBindingRequest\x12\x63\n\x17gcp_user_access_binding\x18\x01 \x01(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBindingB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"s\n!DeleteGcpUserAccessBindingRequest\x12N\n\x04name\x18\x01 
\x01(\tB@\xe0\x41\x02\xfa\x41:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding"\'\n%GcpUserAccessBindingOperationMetadata"\'\n%AccessContextManagerOperationMetadata*D\n\x0bLevelFormat\x12\x1c\n\x18LEVEL_FORMAT_UNSPECIFIED\x10\x00\x12\x0e\n\nAS_DEFINED\x10\x01\x12\x07\n\x03\x43\x45L\x10\x02\x32\xf1\x32\n\x14\x41\x63\x63\x65ssContextManager\x12\xb9\x01\n\x12ListAccessPolicies\x12\x42.google.identity.accesscontextmanager.v1.ListAccessPoliciesRequest\x1a\x43.google.identity.accesscontextmanager.v1.ListAccessPoliciesResponse"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/accessPolicies\x12\xb5\x01\n\x0fGetAccessPolicy\x12?.google.identity.accesscontextmanager.v1.GetAccessPolicyRequest\x1a\x35.google.identity.accesscontextmanager.v1.AccessPolicy"*\xda\x41\x04name\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v1/{name=accessPolicies/*}\x12\xc1\x01\n\x12\x43reateAccessPolicy\x12\x35.google.identity.accesscontextmanager.v1.AccessPolicy\x1a\x1d.google.longrunning.Operation"U\xca\x41\x35\n\x0c\x41\x63\x63\x65ssPolicy\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02\x17"\x12/v1/accessPolicies:\x01*\x12\xf8\x01\n\x12UpdateAccessPolicy\x12\x42.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest\x1a\x1d.google.longrunning.Operation"\x7f\xca\x41\x35\n\x0c\x41\x63\x63\x65ssPolicy\x12%AccessContextManagerOperationMetadata\xda\x41\x12policy,update_mask\x82\xd3\xe4\x93\x02,2"/v1/{policy.name=accessPolicies/*}:\x06policy\x12\xe4\x01\n\x12\x44\x65leteAccessPolicy\x12\x42.google.identity.accesscontextmanager.v1.DeleteAccessPolicyRequest\x1a\x1d.google.longrunning.Operation"k\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x1d*\x1b/v1/{name=accessPolicies/*}\x12\xd4\x01\n\x10ListAccessLevels\x12@.google.identity.accesscontextmanager.v1.ListAccessLevelsRequest\x1a\x41.google.identity.accesscontextmanager.v1.ListAccessLevelsResponse";\xda\x41\x06parent\x82\xd3\xe4\x93\x02,\x12*/v1/{parent=accessPolicies/*}/acc
essLevels\x12\xc1\x01\n\x0eGetAccessLevel\x12>.google.identity.accesscontextmanager.v1.GetAccessLevelRequest\x1a\x34.google.identity.accesscontextmanager.v1.AccessLevel"9\xda\x41\x04name\x82\xd3\xe4\x93\x02,\x12*/v1/{name=accessPolicies/*/accessLevels/*}\x12\x85\x02\n\x11\x43reateAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.CreateAccessLevelRequest\x1a\x1d.google.longrunning.Operation"\x8d\x01\xca\x41\x34\n\x0b\x41\x63\x63\x65ssLevel\x12%AccessContextManagerOperationMetadata\xda\x41\x13parent,access_level\x82\xd3\xe4\x93\x02:"*/v1/{parent=accessPolicies/*}/accessLevels:\x0c\x61\x63\x63\x65ss_level\x12\x97\x02\n\x11UpdateAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.UpdateAccessLevelRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\xca\x41\x34\n\x0b\x41\x63\x63\x65ssLevel\x12%AccessContextManagerOperationMetadata\xda\x41\x18\x61\x63\x63\x65ss_level,update_mask\x82\xd3\xe4\x93\x02G27/v1/{access_level.name=accessPolicies/*/accessLevels/*}:\x0c\x61\x63\x63\x65ss_level\x12\xf1\x01\n\x11\x44\x65leteAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.DeleteAccessLevelRequest\x1a\x1d.google.longrunning.Operation"z\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02,**/v1/{name=accessPolicies/*/accessLevels/*}\x12\x83\x02\n\x13ReplaceAccessLevels\x12\x43.google.identity.accesscontextmanager.v1.ReplaceAccessLevelsRequest\x1a\x1d.google.longrunning.Operation"\x87\x01\xca\x41\x44\n\x1bReplaceAccessLevelsResponse\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02:"5/v1/{parent=accessPolicies/*}/accessLevels:replaceAll:\x01*\x12\xe8\x01\n\x15ListServicePerimeters\x12\x45.google.identity.accesscontextmanager.v1.ListServicePerimetersRequest\x1a\x46.google.identity.accesscontextmanager.v1.ListServicePerimetersResponse"@\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=accessPolicies/*}/servicePerimeters\x12\xd5\x01\n\x13GetServicePerimeter\x12\x43.google.identit
y.accesscontextmanager.v1.GetServicePerimeterRequest\x1a\x39.google.identity.accesscontextmanager.v1.ServicePerimeter">\xda\x41\x04name\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=accessPolicies/*/servicePerimeters/*}\x12\xa3\x02\n\x16\x43reateServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\xa1\x01\xca\x41\x39\n\x10ServicePerimeter\x12%AccessContextManagerOperationMetadata\xda\x41\x18parent,service_perimeter\x82\xd3\xe4\x93\x02\x44"//v1/{parent=accessPolicies/*}/servicePerimeters:\x11service_perimeter\x12\xba\x02\n\x16UpdateServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.UpdateServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\xca\x41\x39\n\x10ServicePerimeter\x12%AccessContextManagerOperationMetadata\xda\x41\x1dservice_perimeter,update_mask\x82\xd3\xe4\x93\x02V2A/v1/{service_perimeter.name=accessPolicies/*/servicePerimeters/*}:\x11service_perimeter\x12\x80\x02\n\x16\x44\x65leteServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.DeleteServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\x7f\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x31*//v1/{name=accessPolicies/*/servicePerimeters/*}\x12\x97\x02\n\x18ReplaceServicePerimeters\x12H.google.identity.accesscontextmanager.v1.ReplaceServicePerimetersRequest\x1a\x1d.google.longrunning.Operation"\x91\x01\xca\x41I\n 
ReplaceServicePerimetersResponse\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02?":/v1/{parent=accessPolicies/*}/servicePerimeters:replaceAll:\x01*\x12\x90\x02\n\x17\x43ommitServicePerimeters\x12G.google.identity.accesscontextmanager.v1.CommitServicePerimetersRequest\x1a\x1d.google.longrunning.Operation"\x8c\x01\xca\x41H\n\x1f\x43ommitServicePerimetersResponse\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02;"6/v1/{parent=accessPolicies/*}/servicePerimeters:commit:\x01*\x12\xf7\x01\n\x19ListGcpUserAccessBindings\x12I.google.identity.accesscontextmanager.v1.ListGcpUserAccessBindingsRequest\x1aJ.google.identity.accesscontextmanager.v1.ListGcpUserAccessBindingsResponse"C\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x34\x12\x32/v1/{parent=organizations/*}/gcpUserAccessBindings\x12\xe4\x01\n\x17GetGcpUserAccessBinding\x12G.google.identity.accesscontextmanager.v1.GetGcpUserAccessBindingRequest\x1a=.google.identity.accesscontextmanager.v1.GcpUserAccessBinding"A\xda\x41\x04name\x82\xd3\xe4\x93\x02\x34\x12\x32/v1/{name=organizations/*/gcpUserAccessBindings/*}\x12\xbe\x02\n\x1a\x43reateGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.CreateGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\xb4\x01\xca\x41=\n\x14GcpUserAccessBinding\x12%GcpUserAccessBindingOperationMetadata\xda\x41\x1eparent,gcp_user_access_binding\x82\xd3\xe4\x93\x02M"2/v1/{parent=organizations/*}/gcpUserAccessBindings:\x17gcp_user_access_binding\x12\xdb\x02\n\x1aUpdateGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.UpdateGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\xd1\x01\xca\x41=\n\x14GcpUserAccessBinding\x12%GcpUserAccessBindingOperationMetadata\xda\x41#gcp_user_access_binding,update_mask\x82\xd3\xe4\x93\x02\x65\x32J/v1/{gcp_user_access_binding.name=organizations/*/gcpUserAccessBindings/*}:\x17gcp_user_access_binding\x12\x8c\x02\n\x1a\x44\x65leteGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.Delet
eGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\x82\x01\xca\x41>\n\x15google.protobuf.Empty\x12%GcpUserAccessBindingOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x34*2/v1/{name=organizations/*/gcpUserAccessBindings/*}\x12\x82\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"7\x82\xd3\xe4\x93\x02\x31",/v1/{resource=accessPolicies/*}:setIamPolicy:\x01*\x12\x82\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"7\x82\xd3\xe4\x93\x02\x31",/v1/{resource=accessPolicies/*}:getIamPolicy:\x01*\x12\xbf\x02\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"\xd3\x01\x82\xd3\xe4\x93\x02\xcc\x01"2/v1/{resource=accessPolicies/*}:testIamPermissions:\x01*ZF"A/v1/{resource=accessPolicies/*/accessLevels/*}:testIamPermissions:\x01*ZK"F/v1/{resource=accessPolicies/*/servicePerimeters/*}:testIamPermissions:\x01*\x1aW\xca\x41#accesscontextmanager.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb0\x02\n+com.google.identity.accesscontextmanager.v1B\x19\x41\x63\x63\x65ssContextManagerProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02\'Google.Identity.AccessContextManager.V1\xca\x02\'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3' diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.pyi b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.pyi index 18dd69c9ff91..224259c3ce20 100644 --- a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.pyi +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.pyi @@ -24,6 
+24,13 @@ from google.api import field_behavior_pb2 as _field_behavior_pb2 from google.api import resource_pb2 as _resource_pb2 from google.iam.v1 import iam_policy_pb2 as _iam_policy_pb2 from google.iam.v1 import policy_pb2 as _policy_pb2 +from google.longrunning import operations_pb2 as _operations_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import field_mask_pb2 as _field_mask_pb2 +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper + from google.identity.accesscontextmanager.v1 import ( access_level_pb2 as _access_level_pb2, ) @@ -36,12 +43,6 @@ from google.identity.accesscontextmanager.v1 import ( from google.identity.accesscontextmanager.v1 import ( service_perimeter_pb2 as _service_perimeter_pb2, ) -from google.longrunning import operations_pb2 as _operations_pb2 -from google.protobuf import descriptor as _descriptor -from google.protobuf import field_mask_pb2 as _field_mask_pb2 -from google.protobuf import message as _message -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper DESCRIPTOR: _descriptor.FileDescriptor diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py index 08cd949e9c0b..a38fabfc12b2 100644 --- a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py @@ -30,11 +30,12 @@ from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import 
expr_pb2 as google_dot_type_dot_expr__pb2 + from google.identity.accesscontextmanager.type import ( device_resources_pb2 as google_dot_identity_dot_accesscontextmanager_dot_type_dot_device__resources__pb2, ) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.type import expr_pb2 as google_dot_type_dot_expr__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n:google/identity/accesscontextmanager/v1/access_level.proto\x12\'google.identity.accesscontextmanager.v1\x1a\x19google/api/resource.proto\x1a@google/identity/accesscontextmanager/type/device_resources.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x16google/type/expr.proto"\xaa\x03\n\x0b\x41\x63\x63\x65ssLevel\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x44\n\x05\x62\x61sic\x18\x04 \x01(\x0b\x32\x33.google.identity.accesscontextmanager.v1.BasicLevelH\x00\x12\x46\n\x06\x63ustom\x18\x05 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.CustomLevelH\x00\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp:p\xea\x41m\n/accesscontextmanager.googleapis.com/AccessLevel\x12:accessPolicies/{access_policy}/accessLevels/{access_level}B\x07\n\x05level"\xef\x01\n\nBasicLevel\x12\x46\n\nconditions\x18\x01 \x03(\x0b\x32\x32.google.identity.accesscontextmanager.v1.Condition\x12j\n\x12\x63ombining_function\x18\x02 \x01(\x0e\x32N.google.identity.accesscontextmanager.v1.BasicLevel.ConditionCombiningFunction"-\n\x1a\x43onditionCombiningFunction\x12\x07\n\x03\x41ND\x10\x00\x12\x06\n\x02OR\x10\x01"\xc3\x01\n\tCondition\x12\x16\n\x0eip_subnetworks\x18\x01 \x03(\t\x12L\n\rdevice_policy\x18\x02 \x01(\x0b\x32\x35.google.identity.accesscontextmanager.v1.DevicePolicy\x12\x1e\n\x16required_access_levels\x18\x03 \x03(\t\x12\x0e\n\x06negate\x18\x05 \x01(\x08\x12\x0f\n\x07members\x18\x06 
\x03(\t\x12\x0f\n\x07regions\x18\x07 \x03(\t".\n\x0b\x43ustomLevel\x12\x1f\n\x04\x65xpr\x18\x01 \x01(\x0b\x32\x11.google.type.Expr"\x89\x03\n\x0c\x44\x65vicePolicy\x12\x1a\n\x12require_screenlock\x18\x01 \x01(\x08\x12\x66\n\x1b\x61llowed_encryption_statuses\x18\x02 \x03(\x0e\x32\x41.google.identity.accesscontextmanager.type.DeviceEncryptionStatus\x12M\n\x0eos_constraints\x18\x03 \x03(\x0b\x32\x35.google.identity.accesscontextmanager.v1.OsConstraint\x12j\n allowed_device_management_levels\x18\x06 \x03(\x0e\x32@.google.identity.accesscontextmanager.type.DeviceManagementLevel\x12\x1e\n\x16require_admin_approval\x18\x07 \x01(\x08\x12\x1a\n\x12require_corp_owned\x18\x08 \x01(\x08"\x8f\x01\n\x0cOsConstraint\x12\x42\n\x07os_type\x18\x01 \x01(\x0e\x32\x31.google.identity.accesscontextmanager.type.OsType\x12\x17\n\x0fminimum_version\x18\x02 \x01(\t\x12"\n\x1arequire_verified_chrome_os\x18\x03 \x01(\x08\x42\xa7\x02\n+com.google.identity.accesscontextmanager.v1B\x10\x41\x63\x63\x65ssLevelProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02\'Google.Identity.AccessContextManager.V1\xca\x02\'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3' diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.pyi b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.pyi index b97e687528b9..295578a1c520 100644 --- a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.pyi +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.pyi @@ -19,9 +19,6 @@ from typing import Optional as _Optional from typing import Union as _Union from google.api import resource_pb2 as _resource_pb2 -from google.identity.accesscontextmanager.type import ( - device_resources_pb2 as 
_device_resources_pb2, -) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import timestamp_pb2 as _timestamp_pb2 @@ -29,6 +26,10 @@ from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.type import expr_pb2 as _expr_pb2 +from google.identity.accesscontextmanager.type import ( + device_resources_pb2 as _device_resources_pb2, +) + DESCRIPTOR: _descriptor.FileDescriptor class AccessLevel(_message.Message): diff --git a/packages/google-cloud-advisorynotifications/.repo-metadata.json b/packages/google-cloud-advisorynotifications/.repo-metadata.json index 45c6925daac3..bcc1dcc40288 100644 --- a/packages/google-cloud-advisorynotifications/.repo-metadata.json +++ b/packages/google-cloud-advisorynotifications/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Advisory Notifications provides well-targeted, timely, and compliant communications about critical security and privacy events in the Google Cloud console and allows you to securely investigate the event, take action, and get support.", - "api_id": "advisorynotifications.googleapis.com", - "api_shortname": "advisorynotifications", - "client_documentation": "https://cloud.google.com/python/docs/reference/advisorynotifications/latest", - "default_version": "v1", - "distribution_name": "google-cloud-advisorynotifications", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "advisorynotifications", - "name_pretty": "Advisory Notifications", - "product_documentation": "https://cloud.google.com/advisory-notifications/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Advisory Notifications provides well-targeted, timely, and compliant communications about critical security and privacy events in the Google 
Cloud console and allows you to securely investigate the event, take action, and get support.", + "api_id": "advisorynotifications.googleapis.com", + "api_shortname": "advisorynotifications", + "client_documentation": "https://cloud.google.com/python/docs/reference/advisorynotifications/latest", + "default_version": "v1", + "distribution_name": "google-cloud-advisorynotifications", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "advisorynotifications", + "name_pretty": "Advisory Notifications", + "product_documentation": "https://cloud.google.com/advisory-notifications/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-alloydb-connectors/.repo-metadata.json b/packages/google-cloud-alloydb-connectors/.repo-metadata.json index b9ac291696c1..660c0d94de09 100644 --- a/packages/google-cloud-alloydb-connectors/.repo-metadata.json +++ b/packages/google-cloud-alloydb-connectors/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "provides enterprise-grade performance and availability while maintaining 100% compatibility with open-source PostgreSQL.", - "api_id": "connectors.googleapis.com", - "api_shortname": "connectors", - "client_documentation": "https://cloud.google.com/python/docs/reference/connectors/latest", - "default_version": "v1", - "distribution_name": "google-cloud-alloydb-connectors", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1194526&template=1689942", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "connectors", - "name_pretty": "AlloyDB connectors", - "product_documentation": "https://cloud.google.com/alloydb/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "provides enterprise-grade performance and availability while maintaining 100% compatibility with open-source 
PostgreSQL.", + "api_id": "connectors.googleapis.com", + "api_shortname": "connectors", + "client_documentation": "https://cloud.google.com/python/docs/reference/connectors/latest", + "default_version": "v1", + "distribution_name": "google-cloud-alloydb-connectors", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1194526\u0026template=1689942", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "connectors", + "name_pretty": "AlloyDB connectors", + "product_documentation": "https://cloud.google.com/alloydb/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-alloydb/.repo-metadata.json b/packages/google-cloud-alloydb/.repo-metadata.json index cc2d8373a58d..6e7f1b9e845d 100644 --- a/packages/google-cloud-alloydb/.repo-metadata.json +++ b/packages/google-cloud-alloydb/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "alloydb.googleapis.com", - "api_shortname": "alloydb", - "client_documentation": "https://cloud.google.com/python/docs/reference/alloydb/latest", - "default_version": "v1", - "distribution_name": "google-cloud-alloydb", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "alloydb", - "name_pretty": "AlloyDB", - "product_documentation": "https://cloud.google.com/alloydb/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "AlloyDB for PostgreSQL is an open source-compatible database service that\nprovides a powerful option for migrating, modernizing, or building\ncommercial-grade applications. It offers full compatibility with standard\nPostgreSQL, and is more than 4x faster for transactional workloads and up\nto 100x faster for analytical queries than standard PostgreSQL in our\nperformance tests. 
AlloyDB for PostgreSQL offers a 99.99 percent\navailability SLA inclusive of maintenance. \u003cbr\u003e\u003cbr\u003e AlloyDB is optimized\nfor the most demanding use cases, allowing you to build new applications\nthat require high transaction throughput, large database sizes, or\nmultiple read resources; scale existing PostgreSQL workloads with no\napplication changes; and modernize legacy proprietary databases.", + "api_id": "alloydb.googleapis.com", + "api_shortname": "alloydb", + "client_documentation": "https://cloud.google.com/python/docs/reference/alloydb/latest", + "default_version": "v1", + "distribution_name": "google-cloud-alloydb", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "alloydb", + "name_pretty": "AlloyDB", + "product_documentation": "https://cloud.google.com/alloydb/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-alloydb/README.rst b/packages/google-cloud-alloydb/README.rst index c50be5119dc0..223bc3953346 100644 --- a/packages/google-cloud-alloydb/README.rst +++ b/packages/google-cloud-alloydb/README.rst @@ -3,7 +3,17 @@ Python Client for AlloyDB |preview| |pypi| |versions| -`AlloyDB`_: +`AlloyDB`_: AlloyDB for PostgreSQL is an open source-compatible database service that +provides a powerful option for migrating, modernizing, or building +commercial-grade applications. It offers full compatibility with standard +PostgreSQL, and is more than 4x faster for transactional workloads and up +to 100x faster for analytical queries than standard PostgreSQL in our +performance tests. AlloyDB for PostgreSQL offers a 99.99 percent +availability SLA inclusive of maintenance.

AlloyDB is optimized +for the most demanding use cases, allowing you to build new applications +that require high transaction throughput, large database sizes, or +multiple read resources; scale existing PostgreSQL workloads with no +application changes; and modernize legacy proprietary databases. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-alloydb/docs/README.rst b/packages/google-cloud-alloydb/docs/README.rst index c50be5119dc0..223bc3953346 100644 --- a/packages/google-cloud-alloydb/docs/README.rst +++ b/packages/google-cloud-alloydb/docs/README.rst @@ -3,7 +3,17 @@ Python Client for AlloyDB |preview| |pypi| |versions| -`AlloyDB`_: +`AlloyDB`_: AlloyDB for PostgreSQL is an open source-compatible database service that +provides a powerful option for migrating, modernizing, or building +commercial-grade applications. It offers full compatibility with standard +PostgreSQL, and is more than 4x faster for transactional workloads and up +to 100x faster for analytical queries than standard PostgreSQL in our +performance tests. AlloyDB for PostgreSQL offers a 99.99 percent +availability SLA inclusive of maintenance.

AlloyDB is optimized +for the most demanding use cases, allowing you to build new applications +that require high transaction throughput, large database sizes, or +multiple read resources; scale existing PostgreSQL workloads with no +application changes; and modernize legacy proprietary databases. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-api-gateway/.repo-metadata.json b/packages/google-cloud-api-gateway/.repo-metadata.json index 7af614581916..6893ca38598b 100644 --- a/packages/google-cloud-api-gateway/.repo-metadata.json +++ b/packages/google-cloud-api-gateway/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "enables you to provide secure access to your backend services through a well-defined REST API that is consistent across all of your services, regardless of the service implementation. Clients consume your REST APIS to implement standalone apps for a mobile device or tablet, through apps running in a browser, or through any other type of app that can make a request to an HTTP endpoint.", - "api_id": "apigateway.googleapis.com", - "api_shortname": "apigateway", - "client_documentation": "https://cloud.google.com/python/docs/reference/apigateway/latest", - "default_version": "v1", - "distribution_name": "google-cloud-api-gateway", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "apigateway", - "name_pretty": "API Gateway", - "product_documentation": "https://cloud.google.com/api-gateway", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "enables you to provide secure access to your backend services through a well-defined REST API that is consistent across all of your services, regardless of the service implementation. 
Clients consume your REST APIS to implement standalone apps for a mobile device or tablet, through apps running in a browser, or through any other type of app that can make a request to an HTTP endpoint.", + "api_id": "apigateway.googleapis.com", + "api_shortname": "apigateway", + "client_documentation": "https://cloud.google.com/python/docs/reference/apigateway/latest", + "default_version": "v1", + "distribution_name": "google-cloud-api-gateway", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "apigateway", + "name_pretty": "API Gateway", + "product_documentation": "https://cloud.google.com/api-gateway", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-api-keys/.repo-metadata.json b/packages/google-cloud-api-keys/.repo-metadata.json index 348f222b9847..b9762fa8ae32 100644 --- a/packages/google-cloud-api-keys/.repo-metadata.json +++ b/packages/google-cloud-api-keys/.repo-metadata.json @@ -1,14 +1,15 @@ { - "api_id": "apikeys.googleapis.com", - "api_shortname": "apikeys", - "client_documentation": "https://cloud.google.com/python/docs/reference/apikeys/latest", - "default_version": "v2", - "distribution_name": "google-cloud-api-keys", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "apikeys", - "name_pretty": "API Keys", - "product_documentation": "https://cloud.google.com/api-keys/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Manages the API keys associated with developer projects.", + "api_id": "apikeys.googleapis.com", + "api_shortname": "apikeys", + "client_documentation": "https://cloud.google.com/python/docs/reference/apikeys/latest", + "default_version": "v2", + "distribution_name": "google-cloud-api-keys", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "apikeys", + "name_pretty": "API Keys", + "product_documentation": "https://cloud.google.com/api-keys/docs", + 
"release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-api-keys/README.rst b/packages/google-cloud-api-keys/README.rst index ecf0659634ab..ff5fb4677f42 100644 --- a/packages/google-cloud-api-keys/README.rst +++ b/packages/google-cloud-api-keys/README.rst @@ -3,7 +3,7 @@ Python Client for API Keys |preview| |pypi| |versions| -`API Keys`_: +`API Keys`_: Manages the API keys associated with developer projects. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-api-keys/docs/README.rst b/packages/google-cloud-api-keys/docs/README.rst index ecf0659634ab..ff5fb4677f42 100644 --- a/packages/google-cloud-api-keys/docs/README.rst +++ b/packages/google-cloud-api-keys/docs/README.rst @@ -3,7 +3,7 @@ Python Client for API Keys |preview| |pypi| |versions| -`API Keys`_: +`API Keys`_: Manages the API keys associated with developer projects. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-apigee-connect/.repo-metadata.json b/packages/google-cloud-apigee-connect/.repo-metadata.json index 597c4299a3af..e01f60565e8f 100644 --- a/packages/google-cloud-apigee-connect/.repo-metadata.json +++ b/packages/google-cloud-apigee-connect/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet.", - "api_id": "apigeeconnect.googleapis.com", - "api_shortname": "apigeeconnect", - "client_documentation": "https://cloud.google.com/python/docs/reference/apigeeconnect/latest", - "default_version": "v1", - "distribution_name": "google-cloud-apigee-connect", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "apigeeconnect", - "name_pretty": "Apigee Connect", - "product_documentation": 
"https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows the Apigee hybrid management plane to connect securely to the MART service in the runtime plane without requiring you to expose the MART endpoint on the internet.", + "api_id": "apigeeconnect.googleapis.com", + "api_shortname": "apigeeconnect", + "client_documentation": "https://cloud.google.com/python/docs/reference/apigeeconnect/latest", + "default_version": "v1", + "distribution_name": "google-cloud-apigee-connect", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "apigeeconnect", + "name_pretty": "Apigee Connect", + "product_documentation": "https://cloud.google.com/apigee/docs/hybrid/v1.4/apigee-connect", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-apigee-registry/.repo-metadata.json b/packages/google-cloud-apigee-registry/.repo-metadata.json index 4e1a74015884..8bfd39ddcc1a 100644 --- a/packages/google-cloud-apigee-registry/.repo-metadata.json +++ b/packages/google-cloud-apigee-registry/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows teams to upload and share machine-readable descriptions of APIs that are in use and in development.", - "api_id": "apigeeregistry.googleapis.com", - "api_shortname": "apigeeregistry", - "client_documentation": "https://cloud.google.com/python/docs/reference/apigeeregistry/latest", - "default_version": "v1", - "distribution_name": "google-cloud-apigee-registry", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "apigeeregistry", - "name_pretty": "Apigee Registry API", - "product_documentation": "https://cloud.google.com/apigee/docs/api-hub/get-started-registry-api", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "allows teams to upload and share 
machine-readable descriptions of APIs that are in use and in development.", + "api_id": "apigeeregistry.googleapis.com", + "api_shortname": "apigeeregistry", + "client_documentation": "https://cloud.google.com/python/docs/reference/apigeeregistry/latest", + "default_version": "v1", + "distribution_name": "google-cloud-apigee-registry", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "apigeeregistry", + "name_pretty": "Apigee Registry API", + "product_documentation": "https://cloud.google.com/apigee/docs/api-hub/get-started-registry-api", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-apihub/.repo-metadata.json b/packages/google-cloud-apihub/.repo-metadata.json index 7cb5b49720ae..713dc0d49812 100644 --- a/packages/google-cloud-apihub/.repo-metadata.json +++ b/packages/google-cloud-apihub/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "API hub lets you consolidate and organize information about all of the APIs of interest to your organization. API hub lets you capture critical information about APIs that allows developers to discover and evaluate them easily and leverage the work of other teams wherever possible. 
API platform teams can use API hub to have visibility into and manage their portfolio of APIs.", - "api_id": "apihub.googleapis.com", - "api_shortname": "apihub", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest", - "default_version": "v1", - "distribution_name": "google-cloud-apihub", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1447560", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-apihub", - "name_pretty": "API Hub API", - "product_documentation": "https://cloud.google.com/apigee/docs/apihub/what-is-api-hub", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "API hub lets you consolidate and organize information about all of the APIs of interest to your organization. API hub lets you capture critical information about APIs that allows developers to discover and evaluate them easily and leverage the work of other teams wherever possible. 
API platform teams can use API hub to have visibility into and manage their portfolio of APIs.", + "api_id": "apihub.googleapis.com", + "api_shortname": "apihub", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apihub/latest", + "default_version": "v1", + "distribution_name": "google-cloud-apihub", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1447560", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-apihub", + "name_pretty": "API Hub API", + "product_documentation": "https://cloud.google.com/apigee/docs/apihub/what-is-api-hub", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-apiregistry/.repo-metadata.json b/packages/google-cloud-apiregistry/.repo-metadata.json index 171a6d305571..800c182b66f6 100644 --- a/packages/google-cloud-apiregistry/.repo-metadata.json +++ b/packages/google-cloud-apiregistry/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "cloudapiregistry.googleapis.com", - "api_shortname": "cloudapiregistry", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apiregistry/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-apiregistry", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1979613&template=2231768", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-apiregistry", - "name_pretty": "Cloud API Registry API", - "product_documentation": "https://docs.cloud.google.com/api-registry/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_id": "cloudapiregistry.googleapis.com", + "api_shortname": "cloudapiregistry", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apiregistry/latest", + "default_version": "v1beta", + "distribution_name": 
"google-cloud-apiregistry", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1979613\u0026template=2231768", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-apiregistry", + "name_pretty": "Cloud API Registry API", + "product_documentation": "https://docs.cloud.google.com/api-registry/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-appengine-admin/.repo-metadata.json b/packages/google-cloud-appengine-admin/.repo-metadata.json index 93f71b3e9368..d8863b5763ac 100644 --- a/packages/google-cloud-appengine-admin/.repo-metadata.json +++ b/packages/google-cloud-appengine-admin/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows you to manage your App Engine applications.", - "api_id": "appengine.googleapis.com", - "api_shortname": "appengine", - "client_documentation": "https://cloud.google.com/python/docs/reference/appengine/latest", - "default_version": "v1", - "distribution_name": "google-cloud-appengine-admin", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "appengine", - "name_pretty": "App Engine Admin", - "product_documentation": "https://cloud.google.com/appengine/docs/admin-api/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows you to manage your App Engine applications.", + "api_id": "appengine.googleapis.com", + "api_shortname": "appengine", + "client_documentation": "https://cloud.google.com/python/docs/reference/appengine/latest", + "default_version": "v1", + "distribution_name": "google-cloud-appengine-admin", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "appengine", + "name_pretty": "App Engine Admin", + "product_documentation": "https://cloud.google.com/appengine/docs/admin-api/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline 
at end of file diff --git a/packages/google-cloud-appengine-logging/.repo-metadata.json b/packages/google-cloud-appengine-logging/.repo-metadata.json index 32668f48938f..25759b297132 100644 --- a/packages/google-cloud-appengine-logging/.repo-metadata.json +++ b/packages/google-cloud-appengine-logging/.repo-metadata.json @@ -1,14 +1,13 @@ { - "api_id": "", - "client_documentation": "https://cloud.google.com/python/docs/reference/appenginelogging/latest", - "default_version": "v1", - "distribution_name": "google-cloud-appengine-logging", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "OTHER", - "name": "appenginelogging", - "name_pretty": "App Engine Logging Protos", - "product_documentation": "https://cloud.google.com/logging/docs/reference/v2/rpc/google.appengine.logging.v1", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "client_documentation": "https://cloud.google.com/python/docs/reference/appenginelogging/latest", + "default_version": "v1", + "distribution_name": "google-cloud-appengine-logging", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "OTHER", + "name": "appenginelogging", + "name_pretty": "App Engine Logging Protos", + "product_documentation": "https://cloud.google.com/logging/docs/reference/v2/rpc/google.appengine.logging.v1", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-apphub/.repo-metadata.json b/packages/google-cloud-apphub/.repo-metadata.json index 242e7c068dcd..b59013788d7c 100644 --- a/packages/google-cloud-apphub/.repo-metadata.json +++ b/packages/google-cloud-apphub/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "null ", - "api_id": "apphub.googleapis.com", - "api_shortname": "apphub", - "client_documentation": 
"https://cloud.google.com/python/docs/reference/google-cloud-apphub/latest", - "default_version": "v1", - "distribution_name": "google-cloud-apphub", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1509913", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-apphub", - "name_pretty": "App Hub API", - "product_documentation": "https://cloud.google.com/app-hub/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "null ", + "api_id": "apphub.googleapis.com", + "api_shortname": "apphub", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-apphub/latest", + "default_version": "v1", + "distribution_name": "google-cloud-apphub", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1509913", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-apphub", + "name_pretty": "App Hub API", + "product_documentation": "https://cloud.google.com/app-hub/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-artifact-registry/.repo-metadata.json b/packages/google-cloud-artifact-registry/.repo-metadata.json index d2721b2f19ea..32bbed8cd326 100644 --- a/packages/google-cloud-artifact-registry/.repo-metadata.json +++ b/packages/google-cloud-artifact-registry/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides a single place for your organization to manage container images and language packages (such as Maven and npm). It is fully integrated with Google Cloud's tooling and runtimes and comes with support for native artifact protocols. 
This makes it simple to integrate it with your CI/CD tooling to set up automated pipelines.", - "api_id": "artifactregistry.googleapis.com", - "api_shortname": "artifactregistry", - "client_documentation": "https://cloud.google.com/python/docs/reference/artifactregistry/latest", - "default_version": "v1", - "distribution_name": "google-cloud-artifact-registry", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "artifactregistry", - "name_pretty": "Artifact Registry", - "product_documentation": "https://cloud.google.com/artifact-registry", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides a single place for your organization to manage container images and language packages (such as Maven and npm). It is fully integrated with Google Cloud's tooling and runtimes and comes with support for native artifact protocols. This makes it simple to integrate it with your CI/CD tooling to set up automated pipelines.", + "api_id": "artifactregistry.googleapis.com", + "api_shortname": "artifactregistry", + "client_documentation": "https://cloud.google.com/python/docs/reference/artifactregistry/latest", + "default_version": "v1", + "distribution_name": "google-cloud-artifact-registry", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "artifactregistry", + "name_pretty": "Artifact Registry", + "product_documentation": "https://cloud.google.com/artifact-registry", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-asset/.repo-metadata.json b/packages/google-cloud-asset/.repo-metadata.json index 8cea3d2aef89..3e1f8a081c58 100644 --- a/packages/google-cloud-asset/.repo-metadata.json +++ b/packages/google-cloud-asset/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "provides inventory services based on a time series database. 
This database keeps a five week history of Google Cloud asset metadata. The Cloud Asset Inventory export service allows you to export all asset metadata at a certain timestamp or export event change history during a timeframe.", - "api_id": "cloudasset.googleapis.com", - "api_shortname": "cloudasset", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudasset/latest", - "default_version": "v1", - "distribution_name": "google-cloud-asset", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559757", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudasset", - "name_pretty": "Cloud Asset Inventory", - "product_documentation": "https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "provides inventory services based on a time series database. This database keeps a five week history of Google Cloud asset metadata. 
The Cloud Asset Inventory export service allows you to export all asset metadata at a certain timestamp or export event change history during a timeframe.", + "api_id": "cloudasset.googleapis.com", + "api_shortname": "cloudasset", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudasset/latest", + "default_version": "v1", + "distribution_name": "google-cloud-asset", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559757", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudasset", + "name_pretty": "Cloud Asset Inventory", + "product_documentation": "https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-assured-workloads/.repo-metadata.json b/packages/google-cloud-assured-workloads/.repo-metadata.json index cb8c739c73c4..76eb8f32ec6a 100644 --- a/packages/google-cloud-assured-workloads/.repo-metadata.json +++ b/packages/google-cloud-assured-workloads/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows you to secure your government workloads and accelerate your path to running compliant workloads on Google Cloud with Assured Workloads for Government.", - "api_id": "assuredworkloads.googleapis.com", - "api_shortname": "assuredworkloads", - "client_documentation": "https://cloud.google.com/python/docs/reference/assuredworkloads/latest", - "default_version": "v1", - "distribution_name": "google-cloud-assured-workloads", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "assuredworkloads", - "name_pretty": "Assured Workloads for Government", - "product_documentation": "https://cloud.google.com/assured-workloads/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows you to secure your government workloads and accelerate your path to running 
compliant workloads on Google Cloud with Assured Workloads for Government.", + "api_id": "assuredworkloads.googleapis.com", + "api_shortname": "assuredworkloads", + "client_documentation": "https://cloud.google.com/python/docs/reference/assuredworkloads/latest", + "default_version": "v1", + "distribution_name": "google-cloud-assured-workloads", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "assuredworkloads", + "name_pretty": "Assured Workloads for Government", + "product_documentation": "https://cloud.google.com/assured-workloads/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-audit-log/.repo-metadata.json b/packages/google-cloud-audit-log/.repo-metadata.json index 60fa354febcd..8d4f3100c37d 100644 --- a/packages/google-cloud-audit-log/.repo-metadata.json +++ b/packages/google-cloud-audit-log/.repo-metadata.json @@ -1,15 +1,14 @@ { - "api_id": "", - "api_shortname": "auditlog", - "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log", - "default_version": "apiVersion", - "distribution_name": "google-cloud-audit-log", - "issue_tracker": "", - "language": "python", - "library_type": "OTHER", - "name": "auditlog", - "name_pretty": "Audit Log API", - "product_documentation": "https://cloud.google.com/logging/docs/audit", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "cloudaudit.googleapis.com", + "api_shortname": "auditlog", + "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log", + "default_version": "apiVersion", + "distribution_name": "google-cloud-audit-log", + "language": "python", + "library_type": "OTHER", + "name": "auditlog", + "name_pretty": "Audit Log API", + "product_documentation": "https://cloud.google.com/logging/docs/audit", + "release_level": "preview", + "repo": 
"googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-auditmanager/.repo-metadata.json b/packages/google-cloud-auditmanager/.repo-metadata.json index 26f4ff392124..0bccb2bf38f2 100644 --- a/packages/google-cloud-auditmanager/.repo-metadata.json +++ b/packages/google-cloud-auditmanager/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "auditmanager.googleapis.com", - "api_shortname": "auditmanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-auditmanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-auditmanager", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1335397&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-auditmanager", - "name_pretty": "Audit Manager API", - "product_documentation": "https://cloud.google.com/audit-manager/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_id": "auditmanager.googleapis.com", + "api_shortname": "auditmanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-auditmanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-auditmanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1335397\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-auditmanager", + "name_pretty": "Audit Manager API", + "product_documentation": "https://cloud.google.com/audit-manager/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-automl/.repo-metadata.json b/packages/google-cloud-automl/.repo-metadata.json index 6765aa5537c5..d9fb1fd20693 100644 --- a/packages/google-cloud-automl/.repo-metadata.json +++ b/packages/google-cloud-automl/.repo-metadata.json @@ -1,17 +1,16 @@ { - 
"api_description": "**AutoML API Python Client is now available in Vertex AI. Please visit** `Vertex SDK for Python `_ **for the new Python Vertex AI client.** Vertex AI is our next generation AI Platform, with many new features that are unavailable in the current platform. `Migrate your resources to Vertex AI `_ to get the latest machine learning features, simplify end-to-end journeys, and productionize models with MLOps. The `Cloud AutoML API `_ is a suite of machine learning products that enables developers with limited machine learning expertise to train high-quality models specific to their business needs, by leveraging Google's state-of-the-art transfer learning, and Neural Architecture Search technology.", - "api_id": "automl.googleapis.com", - "api_shortname": "automl", - "client_documentation": "https://cloud.google.com/python/docs/reference/automl/latest", - "default_version": "v1", - "distribution_name": "google-cloud-automl", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559744", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "automl", - "name_pretty": "Cloud AutoML", - "product_documentation": "https://cloud.google.com/automl/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "**AutoML API Python Client is now available in Vertex AI. Please visit** `Vertex SDK for Python \u003chttps://github.com/googleapis/python-aiplatform\u003e`_ **for the new Python Vertex AI client.** Vertex AI is our next generation AI Platform, with many new features that are unavailable in the current platform. `Migrate your resources to Vertex AI \u003chttps://cloud.google.com/vertex-ai/docs/start/migrating-to-vertex-ai\u003e`_ to get the latest machine learning features, simplify end-to-end journeys, and productionize models with MLOps. 
The `Cloud AutoML API \u003chttps://cloud.google.com/automl\u003e`_ is a suite of machine learning products that enables developers with limited machine learning expertise to train high-quality models specific to their business needs, by leveraging Google's state-of-the-art transfer learning, and Neural Architecture Search technology.", + "api_id": "automl.googleapis.com", + "api_shortname": "automl", + "client_documentation": "https://cloud.google.com/python/docs/reference/automl/latest", + "default_version": "v1", + "distribution_name": "google-cloud-automl", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559744", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "automl", + "name_pretty": "Cloud AutoML", + "product_documentation": "https://cloud.google.com/automl/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-backupdr/.repo-metadata.json b/packages/google-cloud-backupdr/.repo-metadata.json index 91669681694d..80b38cffb999 100644 --- a/packages/google-cloud-backupdr/.repo-metadata.json +++ b/packages/google-cloud-backupdr/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Backup and DR Service ensures that your data is managed, protected, and accessible using both hybrid and cloud-based backup/recovery appliances that are managed using the Backup and DR management console.", - "api_id": "backupdr.googleapis.com", - "api_shortname": "backupdr", - "client_documentation": "https://cloud.google.com/python/docs/reference/backupdr/latest", - "default_version": "v1", - "distribution_name": "google-cloud-backupdr", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=966572", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "backupdr", - "name_pretty": "Backup and DR Service API", - "product_documentation": "https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr", - 
"release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Backup and DR Service ensures that your data is managed, protected, and accessible using both hybrid and cloud-based backup/recovery appliances that are managed using the Backup and DR management console.", + "api_id": "backupdr.googleapis.com", + "api_shortname": "backupdr", + "client_documentation": "https://cloud.google.com/python/docs/reference/backupdr/latest", + "default_version": "v1", + "distribution_name": "google-cloud-backupdr", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=966572", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "backupdr", + "name_pretty": "Backup and DR Service API", + "product_documentation": "https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bare-metal-solution/.repo-metadata.json b/packages/google-cloud-bare-metal-solution/.repo-metadata.json index 710b05be9537..bf312727bcb5 100644 --- a/packages/google-cloud-bare-metal-solution/.repo-metadata.json +++ b/packages/google-cloud-bare-metal-solution/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Bring your Oracle workloads to Google Cloud with Bare Metal Solution and jumpstart your cloud journey with minimal risk.", - "api_id": "baremetalsolution.googleapis.com", - "api_shortname": "baremetalsolution", - "client_documentation": "https://cloud.google.com/python/docs/reference/baremetalsolution/latest", - "default_version": "v2", - "distribution_name": "google-cloud-bare-metal-solution", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "baremetalsolution", - "name_pretty": "Bare Metal Solution", - "product_documentation": "https://cloud.google.com/bare-metal/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + 
"api_description": "Bring your Oracle workloads to Google Cloud with Bare Metal Solution and jumpstart your cloud journey with minimal risk.", + "api_id": "baremetalsolution.googleapis.com", + "api_shortname": "baremetalsolution", + "client_documentation": "https://cloud.google.com/python/docs/reference/baremetalsolution/latest", + "default_version": "v2", + "distribution_name": "google-cloud-bare-metal-solution", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "baremetalsolution", + "name_pretty": "Bare Metal Solution", + "product_documentation": "https://cloud.google.com/bare-metal/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-batch/.repo-metadata.json b/packages/google-cloud-batch/.repo-metadata.json index dfee26c270a8..b8e318f7cd78 100644 --- a/packages/google-cloud-batch/.repo-metadata.json +++ b/packages/google-cloud-batch/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "batch.googleapis.com", - "api_shortname": "batch", - "client_documentation": "https://cloud.google.com/python/docs/reference/batch/latest", - "default_version": "v1", - "distribution_name": "google-cloud-batch", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "batch", - "name_pretty": "Cloud Batch", - "product_documentation": "https://cloud.google.com/batch/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "An API to manage the running of Batch resources on Google Cloud Platform.", + "api_id": "batch.googleapis.com", + "api_shortname": "batch", + "client_documentation": "https://cloud.google.com/python/docs/reference/batch/latest", + "default_version": "v1", + "distribution_name": "google-cloud-batch", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "batch", + "name_pretty": "Cloud Batch", + "product_documentation": 
"https://cloud.google.com/batch/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-batch/README.rst b/packages/google-cloud-batch/README.rst index 3a0226dd5c69..afa62450433d 100644 --- a/packages/google-cloud-batch/README.rst +++ b/packages/google-cloud-batch/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Batch |preview| |pypi| |versions| -`Cloud Batch`_: +`Cloud Batch`_: An API to manage the running of Batch resources on Google Cloud Platform. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-batch/docs/README.rst b/packages/google-cloud-batch/docs/README.rst index 3a0226dd5c69..afa62450433d 100644 --- a/packages/google-cloud-batch/docs/README.rst +++ b/packages/google-cloud-batch/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Batch |preview| |pypi| |versions| -`Cloud Batch`_: +`Cloud Batch`_: An API to manage the running of Batch resources on Google Cloud Platform. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-beyondcorp-appconnections/.repo-metadata.json b/packages/google-cloud-beyondcorp-appconnections/.repo-metadata.json index 68c2ce995e7a..a2d5366b276d 100644 --- a/packages/google-cloud-beyondcorp-appconnections/.repo-metadata.json +++ b/packages/google-cloud-beyondcorp-appconnections/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. 
Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", - "api_id": "beyondcorp.googleapis.com", - "api_shortname": "beyondcorp", - "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappconnections/latest", - "default_version": "v1", - "distribution_name": "google-cloud-beyondcorp-appconnections", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "beyondcorpappconnections", - "name_pretty": "BeyondCorp AppConnections", - "product_documentation": "https://cloud.google.com/beyondcorp/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", + "api_id": "beyondcorp.googleapis.com", + "api_shortname": "beyondcorp", + "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappconnections/latest", + "default_version": "v1", + "distribution_name": "google-cloud-beyondcorp-appconnections", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "beyondcorpappconnections", + "name_pretty": "BeyondCorp AppConnections", + "product_documentation": "https://cloud.google.com/beyondcorp/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-beyondcorp-appconnectors/.repo-metadata.json b/packages/google-cloud-beyondcorp-appconnectors/.repo-metadata.json index 95d244b635e6..1380d906bec9 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/.repo-metadata.json +++ b/packages/google-cloud-beyondcorp-appconnectors/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": 
"Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", - "api_id": "beyondcorp.googleapis.com", - "api_shortname": "beyondcorp", - "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappconnectors/latest", - "default_version": "v1", - "distribution_name": "google-cloud-beyondcorp-appconnectors", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "beyondcorpappconnectors", - "name_pretty": "BeyondCorp AppConnectors", - "product_documentation": "https://cloud.google.com/beyondcorp/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. 
Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", + "api_id": "beyondcorp.googleapis.com", + "api_shortname": "beyondcorp", + "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappconnectors/latest", + "default_version": "v1", + "distribution_name": "google-cloud-beyondcorp-appconnectors", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "beyondcorpappconnectors", + "name_pretty": "BeyondCorp AppConnectors", + "product_documentation": "https://cloud.google.com/beyondcorp/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-beyondcorp-appgateways/.repo-metadata.json b/packages/google-cloud-beyondcorp-appgateways/.repo-metadata.json index 067889f724a0..be4026abe14b 100644 --- a/packages/google-cloud-beyondcorp-appgateways/.repo-metadata.json +++ b/packages/google-cloud-beyondcorp-appgateways/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. 
Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", - "api_id": "beyondcorp.googleapis.com", - "api_shortname": "beyondcorp", - "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappgateways/latest", - "default_version": "v1", - "distribution_name": "google-cloud-beyondcorp-appgateways", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "beyondcorpappgateways", - "name_pretty": "BeyondCorp AppGateways", - "product_documentation": "https://cloud.google.com/beyondcorp/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", + "api_id": "beyondcorp.googleapis.com", + "api_shortname": "beyondcorp", + "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpappgateways/latest", + "default_version": "v1", + "distribution_name": "google-cloud-beyondcorp-appgateways", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "beyondcorpappgateways", + "name_pretty": "BeyondCorp AppGateways", + "product_documentation": "https://cloud.google.com/beyondcorp/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/.repo-metadata.json b/packages/google-cloud-beyondcorp-clientconnectorservices/.repo-metadata.json index e39b92abd4e0..dfcb72810b2f 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/.repo-metadata.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/.repo-metadata.json @@ -1,16 +1,15 @@ { - 
"api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", - "api_id": "beyondcorp.googleapis.com", - "api_shortname": "beyondcorp", - "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpclientconnectorservices/latest", - "default_version": "v1", - "distribution_name": "google-cloud-beyondcorp-clientconnectorservices", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "beyondcorpclientconnectorservices", - "name_pretty": "BeyondCorp ClientConnectorServices", - "product_documentation": "https://cloud.google.com/beyondcorp/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. 
Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", + "api_id": "beyondcorp.googleapis.com", + "api_shortname": "beyondcorp", + "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpclientconnectorservices/latest", + "default_version": "v1", + "distribution_name": "google-cloud-beyondcorp-clientconnectorservices", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "beyondcorpclientconnectorservices", + "name_pretty": "BeyondCorp ClientConnectorServices", + "product_documentation": "https://cloud.google.com/beyondcorp/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-beyondcorp-clientgateways/.repo-metadata.json b/packages/google-cloud-beyondcorp-clientgateways/.repo-metadata.json index d0f0676d23a5..fccbeb8980b6 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/.repo-metadata.json +++ b/packages/google-cloud-beyondcorp-clientgateways/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. 
Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", - "api_id": "beyondcorp.googleapis.com", - "api_shortname": "beyondcorp", - "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpclientgateways/latest", - "default_version": "v1", - "distribution_name": "google-cloud-beyondcorp-clientgateways", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "beyondcorpclientgateways", - "name_pretty": "BeyondCorp ClientGateways", - "product_documentation": "https://cloud.google.com/beyondcorp/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Beyondcorp Enterprise provides identity and context aware access controls for enterprise resources and enables zero-trust access. Using the Beyondcorp Enterprise APIs, enterprises can set up multi-cloud and on-prem connectivity using the App Connector hybrid connectivity solution.", + "api_id": "beyondcorp.googleapis.com", + "api_shortname": "beyondcorp", + "client_documentation": "https://cloud.google.com/python/docs/reference/beyondcorpclientgateways/latest", + "default_version": "v1", + "distribution_name": "google-cloud-beyondcorp-clientgateways", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "beyondcorpclientgateways", + "name_pretty": "BeyondCorp ClientGateways", + "product_documentation": "https://cloud.google.com/beyondcorp/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-biglake-hive/.repo-metadata.json b/packages/google-cloud-biglake-hive/.repo-metadata.json index 9f01b457d655..a2c40f09d551 100644 --- a/packages/google-cloud-biglake-hive/.repo-metadata.json +++ b/packages/google-cloud-biglake-hive/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The BigLake API provides access to BigLake 
Metastore, a serverless, fully\nmanaged, and highly available metastore for open-source data that can be\nused for querying Apache Iceberg tables in BigQuery.", - "api_id": "biglake.googleapis.com", - "api_shortname": "biglake", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-biglake-hive/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-biglake-hive", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187149&template=1019829", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-biglake-hive", - "name_pretty": "BigLake API", - "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "The BigLake API provides access to BigLake Metastore, a serverless, fully\nmanaged, and highly available metastore for open-source data that can be\nused for querying Apache Iceberg tables in BigQuery.", + "api_id": "biglake.googleapis.com", + "api_shortname": "biglake", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-biglake-hive/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-biglake-hive", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187149\u0026template=1019829", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-biglake-hive", + "name_pretty": "BigLake API", + "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-biglake-hive/README.rst b/packages/google-cloud-biglake-hive/README.rst index 356dcafe3ec9..955d274c3911 100644 --- a/packages/google-cloud-biglake-hive/README.rst +++ 
b/packages/google-cloud-biglake-hive/README.rst @@ -63,14 +63,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. diff --git a/packages/google-cloud-biglake-hive/docs/README.rst b/packages/google-cloud-biglake-hive/docs/README.rst index 356dcafe3ec9..955d274c3911 100644 --- a/packages/google-cloud-biglake-hive/docs/README.rst +++ b/packages/google-cloud-biglake-hive/docs/README.rst @@ -63,14 +63,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. 
diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/async_client.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/async_client.py index 2c3dcb110a70..f7a2ea02fc2f 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/async_client.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/async_client.py @@ -210,7 +210,7 @@ def transport(self) -> HiveMetastoreServiceTransport: return self._client.transport @property - def api_endpoint(self): + def api_endpoint(self) -> str: """Return the API endpoint used by the client instance. Returns: diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/client.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/client.py index 02c84912fafe..f4eb3e1f5f8c 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/client.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/client.py @@ -127,7 +127,7 @@ class HiveMetastoreServiceClient(metaclass=HiveMetastoreServiceClientMeta): """ @staticmethod - def _get_default_mtls_endpoint(api_endpoint): + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: """Converts api endpoint to mTLS endpoint. Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to @@ -135,7 +135,7 @@ def _get_default_mtls_endpoint(api_endpoint): Args: api_endpoint (Optional[str]): the api endpoint to convert. Returns: - str: converted mTLS api endpoint. + Optional[str]: converted mTLS api endpoint. 
""" if not api_endpoint: return api_endpoint @@ -145,6 +145,10 @@ def _get_default_mtls_endpoint(api_endpoint): ) m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint @@ -493,7 +497,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): @staticmethod def _get_api_endpoint( api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): + ) -> str: """Return the API endpoint used by the client. Args: @@ -590,7 +594,7 @@ def _add_cred_info_for_auth_errors( error._details.append(json.dumps(cred_info)) @property - def api_endpoint(self): + def api_endpoint(self) -> str: """Return the API endpoint used by the client instance. Returns: @@ -690,7 +694,7 @@ def __init__( self._universe_domain = HiveMetastoreServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/README.rst b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/README.rst index 0692d3def010..b67e1685bf2f 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/README.rst +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/README.rst @@ -2,8 +2,9 @@ transport inheritance structure _______________________________ -`HiveMetastoreServiceTransport` is the ABC for all transports. 
-- public child `HiveMetastoreServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `HiveMetastoreServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseHiveMetastoreServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `HiveMetastoreServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). +``HiveMetastoreServiceTransport`` is the ABC for all transports. + +- public child ``HiveMetastoreServiceGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``HiveMetastoreServiceGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseHiveMetastoreServiceRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``HiveMetastoreServiceRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/base.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/base.py index c85ccc431eae..91702b1c7685 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/base.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/base.py @@ -84,6 +84,10 @@ def __init__( your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. 
+ api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Save the scopes. @@ -133,6 +137,8 @@ def __init__( host += ":443" self._host = host + self._wrapped_methods: Dict[Callable, Callable] = {} + @property def host(self): return self._host diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc.py index 1cfce73e1de8..5506b0c8217c 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc.py @@ -55,7 +55,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value @@ -90,7 +90,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(result, google.protobuf.message.Message): response_payload = MessageToJson(result) else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" grpc_response = { "payload": response_payload, "metadata": metadata, @@ -198,6 +198,10 @@ def __init__( your own client library. 
always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc_asyncio.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc_asyncio.py index dd18c5cb2f3f..93285b048f64 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/grpc_asyncio.py @@ -61,7 +61,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value @@ -96,7 +96,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request elif isinstance(result, google.protobuf.message.Message): response_payload = MessageToJson(result) else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" grpc_response = { "payload": response_payload, "metadata": metadata, @@ -249,6 +249,10 @@ def __init__( your own client library. 
always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport diff --git a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/rest.py b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/rest.py index 5c59ea7bc37b..ecdcb32d838f 100644 --- a/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/rest.py +++ b/packages/google-cloud-biglake-hive/google/cloud/biglake_hive_v1beta/services/hive_metastore_service/transports/rest.py @@ -1092,6 +1092,12 @@ def __init__( url_scheme: the protocol scheme for the API endpoint. Normally "https", but for testing or local servers, "http" can be specified. + interceptor (Optional[HiveMetastoreServiceRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
diff --git a/packages/google-cloud-biglake-hive/noxfile.py b/packages/google-cloud-biglake-hive/noxfile.py index 1288a3f7cd8b..2c1244dce84b 100644 --- a/packages/google-cloud-biglake-hive/noxfile.py +++ b/packages/google-cloud-biglake-hive/noxfile.py @@ -31,8 +31,6 @@ LINT_PATHS.append("samples") ALL_PYTHON = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", @@ -95,8 +93,9 @@ @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2579): + # use the latest version of mypy session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy "mypy<1.16.0", "types-requests", "types-protobuf", @@ -106,6 +105,8 @@ def mypy(session): "mypy", "-p", "google", + "--check-untyped-defs", + *session.posargs, ) @@ -251,32 +252,16 @@ def install_unittest_dependencies(session, *constraints): @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - ["python", "upb", "cpp"], + ["python", "upb"], ) def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ( - "3.11", - "3.12", - "3.13", - "3.14", - ): - session.skip("cpp implementation is not supported in python 3.11+") - constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - # Run py.test against the unit tests. 
session.run( "py.test", @@ -297,7 +282,10 @@ def unit(session, protobuf_implementation): def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") + if session.python >= "3.12": + session.install("--pre", "grpcio>=1.75.1") + else: + session.install("--pre", "grpcio<=1.62.2") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -464,7 +452,7 @@ def docfx(session): @nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", - ["python", "upb", "cpp"], + ["python", "upb"], ) def prerelease_deps(session, protobuf_implementation): """ @@ -474,16 +462,6 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ( - "3.11", - "3.12", - "3.13", - "3.14", - ): - session.skip("cpp implementation is not supported in python 3.11+") - # Install all dependencies session.install("-e", ".") @@ -527,7 +505,7 @@ def prerelease_deps(session, protobuf_implementation): "google-api-core", "google-auth", "grpc-google-iam-v1", - "grpcio", + "grpcio>=1.75.1" if session.python >= "3.12" else "grpcio<=1.62.2", "grpcio-status", "protobuf", "proto-plus", @@ -622,7 +600,7 @@ def core_deps_from_source(session, protobuf_implementation): core_dependencies_from_source = [ "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", "google-api-core @ git+https://github.com/googleapis/google-cloud-python#egg=google-api-core&subdirectory=packages/google-api-core", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "google-auth @ git+https://github.com/googleapis/google-cloud-python#egg=google-auth&subdirectory=packages/google-auth", "grpc-google-iam-v1 @ 
git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", "proto-plus @ git+https://github.com/googleapis/google-cloud-python#egg=proto-plus&subdirectory=packages/proto-plus", ] diff --git a/packages/google-cloud-biglake-hive/setup.py b/packages/google-cloud-biglake-hive/setup.py index 39ba6b7911e9..3f8105c723c2 100644 --- a/packages/google-cloud-biglake-hive/setup.py +++ b/packages/google-cloud-biglake-hive/setup.py @@ -41,7 +41,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 2.11.0, <3.0.0", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", @@ -49,7 +49,7 @@ "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf >= 4.25.8, < 8.0.0", ] extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-biglake-hive" @@ -81,8 +81,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -94,7 +92,7 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, - python_requires=">=3.7", + python_requires=">=3.9", install_requires=dependencies, extras_require=extras, include_package_data=True, diff --git a/packages/google-cloud-biglake-hive/testing/constraints-3.7.txt 
b/packages/google-cloud-biglake-hive/testing/constraints-3.7.txt deleted file mode 100644 index bbf88e9745ae..000000000000 --- a/packages/google-cloud-biglake-hive/testing/constraints-3.7.txt +++ /dev/null @@ -1,14 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# cryptography is a direct dependency of google-auth -cryptography==38.0.3 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/packages/google-cloud-biglake-hive/testing/constraints-3.8.txt b/packages/google-cloud-biglake-hive/testing/constraints-3.8.txt deleted file mode 100644 index 7599dea499ed..000000000000 --- a/packages/google-cloud-biglake-hive/testing/constraints-3.8.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -# cryptography is a direct dependency of google-auth -cryptography diff --git a/packages/google-cloud-biglake-hive/testing/constraints-3.9.txt b/packages/google-cloud-biglake-hive/testing/constraints-3.9.txt index 7599dea499ed..ac3833d41b9a 100644 --- a/packages/google-cloud-biglake-hive/testing/constraints-3.9.txt +++ b/packages/google-cloud-biglake-hive/testing/constraints-3.9.txt @@ -1,10 +1,13 @@ # -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -# cryptography is a direct dependency of google-auth -cryptography +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file, +# pinning their versions to their lower bounds. +# For example, if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# then this file should have google-cloud-foo==1.14.0 +google-api-core==2.21.0 +google-auth==2.35.0 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==4.25.8 diff --git a/packages/google-cloud-biglake-hive/tests/unit/gapic/biglake_hive_v1beta/test_hive_metastore_service.py b/packages/google-cloud-biglake-hive/tests/unit/gapic/biglake_hive_v1beta/test_hive_metastore_service.py index 93338fdcfd8a..94bde6fc0d02 100644 --- a/packages/google-cloud-biglake-hive/tests/unit/gapic/biglake_hive_v1beta/test_hive_metastore_service.py +++ b/packages/google-cloud-biglake-hive/tests/unit/gapic/biglake_hive_v1beta/test_hive_metastore_service.py @@ -121,6 +121,7 @@ def test__get_default_mtls_endpoint(): sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" + custom_endpoint = ".custom" assert HiveMetastoreServiceClient._get_default_mtls_endpoint(None) is None assert ( @@ -143,6 +144,10 @@ def test__get_default_mtls_endpoint(): HiveMetastoreServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) + assert ( + HiveMetastoreServiceClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) def test__read_environment_variables(): @@ -1342,11 +1347,13 @@ def test_hive_metastore_service_client_create_channel_credentials_file( ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object(grpc_helpers, "create_channel") as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -13420,8 +13427,9 @@ def test_create_hive_catalog_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -13561,18 +13569,20 @@ def test_create_hive_catalog_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_catalog" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_create_hive_catalog_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_catalog" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_catalog" + ) 
as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_create_hive_catalog_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_catalog" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13625,8 +13635,9 @@ def test_get_hive_catalog_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -13693,18 +13704,20 @@ def test_get_hive_catalog_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_catalog" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_get_hive_catalog_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_catalog" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_catalog" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_get_hive_catalog_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_catalog" + ) as pre, + ): pre.assert_not_called() 
post.assert_not_called() post_with_metadata.assert_not_called() @@ -13757,8 +13770,9 @@ def test_list_hive_catalogs_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -13823,18 +13837,20 @@ def test_list_hive_catalogs_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_catalogs" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_list_hive_catalogs_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_catalogs" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_catalogs" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_list_hive_catalogs_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_catalogs" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13892,8 +13908,9 @@ def test_update_hive_catalog_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14033,18 +14050,20 @@ def test_update_hive_catalog_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_catalog" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_update_hive_catalog_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_catalog" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_catalog" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_update_hive_catalog_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_catalog" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14097,8 +14116,9 @@ def test_delete_hive_catalog_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14155,13 +14175,13 @@ def test_delete_hive_catalog_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_catalog" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_catalog" + ) as pre, + ): pre.assert_not_called() pb_message = hive_metastore.DeleteHiveCatalogRequest.pb( hive_metastore.DeleteHiveCatalogRequest() @@ -14206,8 +14226,9 @@ def test_create_hive_database_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14347,18 +14368,20 @@ def test_create_hive_database_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_database" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_create_hive_database_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_database" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_database" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_create_hive_database_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_database" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14413,8 +14436,9 @@ def test_get_hive_database_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14481,18 +14505,20 @@ def test_get_hive_database_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_database" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_get_hive_database_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_database" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_database" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_get_hive_database_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_database" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14547,8 +14573,9 @@ def test_list_hive_databases_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14611,18 +14638,20 @@ def test_list_hive_databases_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_databases" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_list_hive_databases_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_databases" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_databases" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_list_hive_databases_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_databases" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14682,8 +14711,9 @@ def test_update_hive_database_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14825,18 +14855,20 @@ def test_update_hive_database_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_database" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_update_hive_database_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_database" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_database" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_update_hive_database_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_database" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14891,8 +14923,9 @@ def test_delete_hive_database_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14949,13 +14982,13 @@ def test_delete_hive_database_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_database" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_database" + ) as pre, + ): pre.assert_not_called() pb_message = hive_metastore.DeleteHiveDatabaseRequest.pb( hive_metastore.DeleteHiveDatabaseRequest() @@ -15000,8 +15033,9 @@ def test_create_hive_table_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15183,18 +15217,20 @@ def test_create_hive_table_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_table" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_create_hive_table_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_table" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_create_hive_table" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_create_hive_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_create_hive_table" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -15249,8 +15285,9 @@ def test_get_hive_table_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15319,18 +15356,20 @@ def test_get_hive_table_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_table" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_get_hive_table_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_table" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_get_hive_table" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_get_hive_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_get_hive_table" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -15383,8 +15422,9 @@ def test_list_hive_tables_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15447,18 +15487,20 @@ def test_list_hive_tables_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_tables" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_list_hive_tables_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_tables" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_list_hive_tables" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_list_hive_tables_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_list_hive_tables" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -15520,8 +15562,9 @@ def test_update_hive_table_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15707,18 +15750,20 @@ def test_update_hive_table_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_table" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_update_hive_table_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_table" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_update_hive_table" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_update_hive_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_update_hive_table" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -15773,8 +15818,9 @@ def test_delete_hive_table_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15833,13 +15879,13 @@ def test_delete_hive_table_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_table" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_delete_hive_table" + ) as pre, + ): pre.assert_not_called() pb_message = hive_metastore.DeleteHiveTableRequest.pb( hive_metastore.DeleteHiveTableRequest() @@ -15886,8 +15932,9 @@ def test_batch_create_partitions_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15949,18 +15996,22 @@ def test_batch_create_partitions_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_batch_create_partitions" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_batch_create_partitions_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_batch_create_partitions" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_batch_create_partitions", + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_batch_create_partitions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "pre_batch_create_partitions", + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -16020,8 +16071,9 @@ def test_batch_delete_partitions_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -16080,13 +16132,14 @@ def test_batch_delete_partitions_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_batch_delete_partitions" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "pre_batch_delete_partitions", + ) as pre, + ): pre.assert_not_called() pb_message = hive_metastore.BatchDeletePartitionsRequest.pb( hive_metastore.BatchDeletePartitionsRequest() @@ -16133,8 +16186,9 @@ def test_batch_update_partitions_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -16196,18 +16250,22 @@ def test_batch_update_partitions_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_batch_update_partitions" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_batch_update_partitions_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_batch_update_partitions" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_batch_update_partitions", + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_batch_update_partitions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "pre_batch_update_partitions", + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -16267,8 +16325,9 @@ def test_list_partitions_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -16334,18 +16393,20 @@ def test_list_partitions_rest_interceptors(null_interceptor): ) client = HiveMetastoreServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "post_list_partitions" - ) as post, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, - "post_list_partitions_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.HiveMetastoreServiceRestInterceptor, "pre_list_partitions" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "post_list_partitions" + ) as post, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, + "post_list_partitions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.HiveMetastoreServiceRestInterceptor, "pre_list_partitions" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -16880,11 +16941,14 @@ def test_hive_metastore_service_base_transport(): def test_hive_metastore_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.biglake_hive_v1beta.services.hive_metastore_service.transports.HiveMetastoreServiceTransport._prep_wrapped_messages" - ) as Transport: + with ( + 
mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.biglake_hive_v1beta.services.hive_metastore_service.transports.HiveMetastoreServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.HiveMetastoreServiceTransport( @@ -16904,9 +16968,12 @@ def test_hive_metastore_service_base_transport_with_credentials_file(): def test_hive_metastore_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.biglake_hive_v1beta.services.hive_metastore_service.transports.HiveMetastoreServiceTransport._prep_wrapped_messages" - ) as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.biglake_hive_v1beta.services.hive_metastore_service.transports.HiveMetastoreServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.HiveMetastoreServiceTransport() @@ -16984,11 +17051,12 @@ def test_hive_metastore_service_transport_auth_gdch_credentials(transport_class) def test_hive_metastore_service_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) diff --git a/packages/google-cloud-biglake/.repo-metadata.json b/packages/google-cloud-biglake/.repo-metadata.json index 84b01cfad745..1cf8c92b18ea 100644 --- a/packages/google-cloud-biglake/.repo-metadata.json +++ b/packages/google-cloud-biglake/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery.", - "api_id": "biglake.googleapis.com", - "api_shortname": "biglake", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-biglake/latest", - "default_version": "v1", - "distribution_name": "google-cloud-biglake", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-biglake", - "name_pretty": "BigLake API", - "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The BigLake API provides access to BigLake Metastore, a serverless, fully managed, and highly available metastore for open-source data that can be used for querying Apache Iceberg tables in BigQuery.", + "api_id": "biglake.googleapis.com", + "api_shortname": "biglake", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/google-cloud-biglake/latest", + "default_version": "v1", + "distribution_name": "google-cloud-biglake", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-biglake", + "name_pretty": "BigLake API", + "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-analyticshub/.repo-metadata.json b/packages/google-cloud-bigquery-analyticshub/.repo-metadata.json index a05777bbee21..7380819f4565 100644 --- a/packages/google-cloud-bigquery-analyticshub/.repo-metadata.json +++ b/packages/google-cloud-bigquery-analyticshub/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Analytics Hub is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost. Curate a library of internal and external assets, including unique datasets like Google Trends, backed by the power of BigQuery.", - "api_id": "analyticshub.googleapis.com", - "api_shortname": "analyticshub", - "client_documentation": "https://cloud.google.com/python/docs/reference/analyticshub/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-analyticshub", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "analyticshub", - "name_pretty": "BigQuery Analytics Hub", - "product_documentation": "https://cloud.google.com/analytics-hub", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Analytics Hub is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost. 
Curate a library of internal and external assets, including unique datasets like Google Trends, backed by the power of BigQuery.", + "api_id": "analyticshub.googleapis.com", + "api_shortname": "analyticshub", + "client_documentation": "https://cloud.google.com/python/docs/reference/analyticshub/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-analyticshub", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "analyticshub", + "name_pretty": "BigQuery Analytics Hub", + "product_documentation": "https://cloud.google.com/analytics-hub", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-biglake/.repo-metadata.json b/packages/google-cloud-bigquery-biglake/.repo-metadata.json index 79e649a1de1a..1481ade04617 100644 --- a/packages/google-cloud-bigquery-biglake/.repo-metadata.json +++ b/packages/google-cloud-bigquery-biglake/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "BigLake API", - "api_id": "biglake.googleapis.com", - "api_shortname": "biglake", - "client_documentation": "https://cloud.google.com/python/docs/reference/biglake/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-biglake", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187149&template=1019829", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "biglake", - "name_pretty": "BigLake API", - "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "BigLake API", + "api_id": "biglake.googleapis.com", + "api_shortname": "biglake", + "client_documentation": "https://cloud.google.com/python/docs/reference/biglake/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-biglake", + "issue_tracker": 
"https://issuetracker.google.com/issues/new?component=187149\u0026template=1019829", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "biglake", + "name_pretty": "BigLake API", + "product_documentation": "https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/.repo-metadata.json b/packages/google-cloud-bigquery-connection/.repo-metadata.json index d6da3f3b1ba8..c0807522c106 100644 --- a/packages/google-cloud-bigquery-connection/.repo-metadata.json +++ b/packages/google-cloud-bigquery-connection/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "Manage BigQuery connections to external data sources.", - "api_id": "bigqueryconnection.googleapis.com", - "api_shortname": "bigqueryconnection", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryconnection/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-connection", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "bigqueryconnection", - "name_pretty": "BigQuery Connection", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/bigqueryconnection", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "Manage BigQuery connections to external data sources.", + "api_id": "bigqueryconnection.googleapis.com", + "api_shortname": "bigqueryconnection", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryconnection/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-connection", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "bigqueryconnection", + "name_pretty": "BigQuery Connection", + "product_documentation": 
"https://cloud.google.com/bigquery/docs/reference/bigqueryconnection", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-data-exchange/.repo-metadata.json b/packages/google-cloud-bigquery-data-exchange/.repo-metadata.json index 129c73a2df9d..44556cbfe3aa 100644 --- a/packages/google-cloud-bigquery-data-exchange/.repo-metadata.json +++ b/packages/google-cloud-bigquery-data-exchange/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost.", - "api_id": "analyticshub.googleapis.com", - "api_shortname": "analyticshub", - "client_documentation": "https://cloud.google.com/python/docs/reference/analyticshub/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-bigquery-data-exchange", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "analyticshub", - "name_pretty": "BigQuery Analytics Hub", - "product_documentation": "https://cloud.google.com/bigquery/docs/analytics-hub-introduction", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "is a data exchange that allows you to efficiently and securely exchange data assets across organizations to address challenges of data reliability and cost.", + "api_id": "analyticshub.googleapis.com", + "api_shortname": "analyticshub", + "client_documentation": "https://cloud.google.com/python/docs/reference/analyticshub/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-bigquery-data-exchange", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "analyticshub", + "name_pretty": "BigQuery Analytics Hub", + "product_documentation": "https://cloud.google.com/bigquery/docs/analytics-hub-introduction", + "release_level": "preview", + "repo": 
"googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datapolicies/.repo-metadata.json b/packages/google-cloud-bigquery-datapolicies/.repo-metadata.json index a9c909f01210..8e53e7594712 100644 --- a/packages/google-cloud-bigquery-datapolicies/.repo-metadata.json +++ b/packages/google-cloud-bigquery-datapolicies/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Allows users to manage BigQuery data policies.", - "api_id": "bigquerydatapolicy.googleapis.com", - "api_shortname": "bigquerydatapolicy", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerydatapolicy/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-datapolicies", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "bigquerydatapolicy", - "name_pretty": "BigQuery Data Policy", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/bigquerydatapolicy/rest", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Allows users to manage BigQuery data policies.", + "api_id": "bigquerydatapolicy.googleapis.com", + "api_shortname": "bigquerydatapolicy", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerydatapolicy/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-datapolicies", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187149\u0026template=1162659", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "bigquerydatapolicy", + "name_pretty": "BigQuery Data Policy", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/bigquerydatapolicy/rest", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json 
b/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json index aa973943c5a0..fa1a9e6b6dfe 100644 --- a/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json +++ b/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis.", - "api_id": "bigquerydatatransfer.googleapis.com", - "api_shortname": "bigquerydatatransfer", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerydatatransfer/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-datatransfer", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "bigquerydatatransfer", - "name_pretty": "BigQuery Data Transfer", - "product_documentation": "https://cloud.google.com/bigquery/transfer/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis.", + "api_id": "bigquerydatatransfer.googleapis.com", + "api_shortname": "bigquerydatatransfer", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerydatatransfer/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-datatransfer", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "bigquerydatatransfer", + "name_pretty": "BigQuery Data Transfer", + "product_documentation": "https://cloud.google.com/bigquery/transfer/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-logging/.repo-metadata.json 
b/packages/google-cloud-bigquery-logging/.repo-metadata.json index ff47e4de32b1..6f38e8ed2f30 100644 --- a/packages/google-cloud-bigquery-logging/.repo-metadata.json +++ b/packages/google-cloud-bigquery-logging/.repo-metadata.json @@ -1,15 +1,14 @@ { - "api_id": "", - "api_shortname": "bigquerylogging", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerylogging/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-logging", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "OTHER", - "name": "bigquerylogging", - "name_pretty": "BigQuery Logging Protos", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/auditlogs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_shortname": "bigquerylogging", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerylogging/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-logging", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "OTHER", + "name": "bigquerylogging", + "name_pretty": "BigQuery Logging Protos", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/auditlogs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/.repo-metadata.json b/packages/google-cloud-bigquery-migration/.repo-metadata.json index 1b5874f43872..a39d9734e260 100644 --- a/packages/google-cloud-bigquery-migration/.repo-metadata.json +++ b/packages/google-cloud-bigquery-migration/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_id": "bigquerymigration.googleapis.com", - "api_shortname": "bigquerymigration", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerymigration/latest", - 
"default_version": "v2", - "distribution_name": "google-cloud-bigquery-migration", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "bigquerymigration", - "name_pretty": "Google BigQuery Migration", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "The migration service, exposing apis for migration jobs operations, and\nagent management.", + "api_id": "bigquerymigration.googleapis.com", + "api_shortname": "bigquerymigration", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerymigration/latest", + "default_version": "v2", + "distribution_name": "google-cloud-bigquery-migration", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "bigquerymigration", + "name_pretty": "Google BigQuery Migration", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-migration/README.rst b/packages/google-cloud-bigquery-migration/README.rst index 40289b6b39b2..0a2f6acdcbc1 100644 --- a/packages/google-cloud-bigquery-migration/README.rst +++ b/packages/google-cloud-bigquery-migration/README.rst @@ -3,7 +3,8 @@ Python Client for Google BigQuery Migration |preview| |pypi| |versions| -`Google BigQuery Migration`_: +`Google BigQuery Migration`_: The migration service, exposing apis for migration jobs operations, and +agent management. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-bigquery-migration/docs/README.rst b/packages/google-cloud-bigquery-migration/docs/README.rst index 40289b6b39b2..0a2f6acdcbc1 100644 --- a/packages/google-cloud-bigquery-migration/docs/README.rst +++ b/packages/google-cloud-bigquery-migration/docs/README.rst @@ -3,7 +3,8 @@ Python Client for Google BigQuery Migration |preview| |pypi| |versions| -`Google BigQuery Migration`_: +`Google BigQuery Migration`_: The migration service, exposing apis for migration jobs operations, and +agent management. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-bigquery-reservation/.repo-metadata.json b/packages/google-cloud-bigquery-reservation/.repo-metadata.json index b84ada7a11f9..4e4d6127cbd4 100644 --- a/packages/google-cloud-bigquery-reservation/.repo-metadata.json +++ b/packages/google-cloud-bigquery-reservation/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "Modify BigQuery flat-rate reservations.", - "api_id": "bigqueryreservation.googleapis.com", - "api_shortname": "bigqueryreservation", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryreservation/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-reservation", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "bigqueryreservation", - "name_pretty": "BigQuery Reservation", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/reservations", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "Modify BigQuery flat-rate reservations.", + "api_id": "bigqueryreservation.googleapis.com", + "api_shortname": "bigqueryreservation", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryreservation/latest", + "default_version": "v1", + "distribution_name": 
"google-cloud-bigquery-reservation", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "bigqueryreservation", + "name_pretty": "BigQuery Reservation", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/reservations", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-bigquery-storage/.repo-metadata.json b/packages/google-cloud-bigquery-storage/.repo-metadata.json index 057e002d01ce..46836980ade6 100644 --- a/packages/google-cloud-bigquery-storage/.repo-metadata.json +++ b/packages/google-cloud-bigquery-storage/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_id": "bigquerystorage.googleapis.com", - "api_shortname": "bigquerystorage", - "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerystorage/latest", - "default_version": "v1", - "distribution_name": "google-cloud-bigquery-storage", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "bigquerystorage", - "name_pretty": "Google BigQuery Storage", - "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_id": "bigquerystorage.googleapis.com", + "api_shortname": "bigquerystorage", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerystorage/latest", + "default_version": "v1", + "distribution_name": "google-cloud-bigquery-storage", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "bigquerystorage", + "name_pretty": "Google BigQuery Storage", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/storage/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of 
file diff --git a/packages/google-cloud-bigquery/.repo-metadata.json b/packages/google-cloud-bigquery/.repo-metadata.json index a71fce9a93b4..6b55ebc67f39 100644 --- a/packages/google-cloud-bigquery/.repo-metadata.json +++ b/packages/google-cloud-bigquery/.repo-metadata.json @@ -1,18 +1,15 @@ { - "name": "bigquery", - "name_pretty": "Google Cloud BigQuery", - "product_documentation": "https://cloud.google.com/bigquery", + "api_id": "bigquery.googleapis.com", + "api_shortname": "bigquery", "client_documentation": "https://cloud.google.com/python/docs/reference/bigquery/latest", + "default_version": "v2", + "distribution_name": "google-cloud-bigquery", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-bigquery", - "api_id": "bigquery.googleapis.com", - "requires_billing": false, - "default_version": "v2", - "codeowner_team": "@googleapis/python-core-client-libraries", - "api_shortname": "bigquery", - "api_description": "is a fully managed, NoOps, low cost data analytics service.\nData can be streamed into BigQuery at millions of rows per second to enable real-time analysis.\nWith BigQuery you can easily deploy Petabyte-scale Databases." 
-} + "name": "bigquery", + "name_pretty": "Google Cloud BigQuery", + "product_documentation": "https://cloud.google.com/bigquery", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-bigquery/docs/README.rst b/packages/google-cloud-bigquery/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-bigquery/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigquery/docs/README.rst b/packages/google-cloud-bigquery/docs/README.rst new file mode 100644 index 000000000000..23ed9257ddc4 --- /dev/null +++ b/packages/google-cloud-bigquery/docs/README.rst @@ -0,0 +1,141 @@ +Python Client for Google BigQuery +================================= + +|GA| |pypi| |versions| + +Querying massive datasets can be time consuming and expensive without the +right hardware and infrastructure. Google `BigQuery`_ solves this problem by +enabling super-fast, SQL queries against append-mostly tables, using the +processing power of Google's infrastructure. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg + :target: https://pypi.org/project/google-cloud-bigquery/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery.svg + :target: https://pypi.org/project/google-cloud-bigquery/ +.. _BigQuery: https://cloud.google.com/bigquery/what-is-bigquery +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquery/latest/summary_overview +.. 
_Product Documentation: https://cloud.google.com/bigquery/docs/reference/v2/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud BigQuery API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud BigQuery API.: https://cloud.google.com/bigquery +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.9 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7, Python == 3.5, Python == 3.6, Python == 3.7, and Python == 3.8. + +The last version of this library compatible with Python 2.7 and 3.5 is +`google-cloud-bigquery==1.28.0`. + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-bigquery + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-bigquery + +Example Usage +------------- + +Perform a query +~~~~~~~~~~~~~~~ + +.. 
code:: python + + from google.cloud import bigquery + + client = bigquery.Client() + + # Perform a query. + QUERY = ( + 'SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` ' + 'WHERE state = "TX" ' + 'LIMIT 100') + query_job = client.query(QUERY) # API request + rows = query_job.result() # Waits for query to finish + + for row in rows: + print(row.name) + +Instrumenting With OpenTelemetry +-------------------------------- + +This application uses `OpenTelemetry`_ to output tracing data from +API calls to BigQuery. To enable OpenTelemetry tracing in +the BigQuery client the following PyPI packages need to be installed: + +.. _OpenTelemetry: https://opentelemetry.io + +.. code-block:: console + + pip install google-cloud-bigquery[opentelemetry] opentelemetry-exporter-gcp-trace + +After installation, OpenTelemetry can be used in the BigQuery +client and in BigQuery jobs. First, however, an exporter must be +specified for where the trace data will be outputted to. An +example of this can be found here: + +.. code-block:: python + + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import BatchSpanProcessor + from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter + tracer_provider = TracerProvider() + tracer_provider = BatchSpanProcessor(CloudTraceSpanExporter()) + trace.set_tracer_provider(TracerProvider()) + +In this example all tracing data will be published to the Google +`Cloud Trace`_ console. For more information on OpenTelemetry, please consult the `OpenTelemetry documentation`_. + +.. _OpenTelemetry documentation: https://opentelemetry-python.readthedocs.io +.. 
_Cloud Trace: https://cloud.google.com/trace diff --git a/packages/google-cloud-bigtable/.repo-metadata.json b/packages/google-cloud-bigtable/.repo-metadata.json index 7c2effe06c93..544ec3b42a64 100644 --- a/packages/google-cloud-bigtable/.repo-metadata.json +++ b/packages/google-cloud-bigtable/.repo-metadata.json @@ -1,80 +1,15 @@ { - "name": "bigtable", - "name_pretty": "Cloud Bigtable", - "product_documentation": "https://cloud.google.com/bigtable", + "api_id": "bigtable.googleapis.com", + "api_shortname": "bigtable", "client_documentation": "https://cloud.google.com/python/docs/reference/bigtable/latest", + "default_version": "v2", + "distribution_name": "google-cloud-bigtable", "issue_tracker": "https://issuetracker.google.com/savedsearches/559777", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-bigtable", - "api_id": "bigtable.googleapis.com", - "requires_billing": true, - "samples": [ - { - "name": "Hello World in Cloud Bigtable", - "description": "Demonstrates how to connect to Cloud Bigtable and run some basic operations. More information available at: https://cloud.google.com/bigtable/docs/samples-python-hello", - "file": "main.py", - "runnable": true, - "custom_content": "
usage: main.py [-h] [--table TABLE] project_id instance_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Table to create and destroy. (default: Hello-Bigtable)
", - "override_path": "hello" - }, - { - "name": "Hello World using HappyBase", - "description": "This sample demonstrates using the Google Cloud Client Library HappyBase package, an implementation of the HappyBase API to connect to and interact with Cloud Bigtable. More information available at: https://cloud.google.com/bigtable/docs/samples-python-hello-happybase", - "file": "main.py", - "runnable": true, - "custom_content": "
usage: main.py [-h] [--table TABLE] project_id instance_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Table to create and destroy. (default: Hello-Bigtable)
", - "override_path": "hello_happybase" - }, - { - "name": "cbt Command Demonstration", - "description": "This page explains how to use the cbt command to connect to a Cloud Bigtable instance, perform basic administrative tasks, and read and write data in a table. More information about this quickstart is available at https://cloud.google.com/bigtable/docs/quickstart-cbt", - "file": "instanceadmin.py", - "runnable": true, - "custom_content": "
usage: instanceadmin.py [-h] [run] [dev-instance] [del-instance] [add-cluster] [del-cluster] project_id instance_id cluster_id
Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Table to create and destroy. (default: Hello-Bigtable)
", - "override_path": "instanceadmin" - }, - { - "name": "Metric Scaler", - "description": "This sample demonstrates how to use Stackdriver Monitoring to scale Cloud Bigtable based on CPU usage.", - "file": "metricscaler.py", - "runnable": true, - "custom_content": "
usage: metricscaler.py [-h] [--high_cpu_threshold HIGH_CPU_THRESHOLD] [--low_cpu_threshold LOW_CPU_THRESHOLD] [--short_sleep SHORT_SLEEP] [--long_sleep LONG_SLEEP] bigtable_instance bigtable_cluster
usage: metricscaler.py [-h] [--high_cpu_threshold HIGH_CPU_THRESHOLD]
                       [--low_cpu_threshold LOW_CPU_THRESHOLD]
                       [--short_sleep SHORT_SLEEP] [--long_sleep LONG_SLEEP]
                       bigtable_instance bigtable_cluster


Scales Cloud Bigtable clusters based on CPU usage.


positional arguments:
  bigtable_instance     ID of the Cloud Bigtable instance to connect to.
  bigtable_cluster      ID of the Cloud Bigtable cluster to connect to.


optional arguments:
  -h, --help            show this help message and exit
  --high_cpu_threshold HIGH_CPU_THRESHOLD
                        If Cloud Bigtable CPU usage is above this threshold,
                        scale up
  --low_cpu_threshold LOW_CPU_THRESHOLD
                        If Cloud Bigtable CPU usage is below this threshold,
                        scale down
  --short_sleep SHORT_SLEEP
                        How long to sleep in seconds between checking metrics
                        after no scale operation
  --long_sleep LONG_SLEEP
                        How long to sleep in seconds between checking metrics
                        after a scaling operation
", - "override_path": "metricscaler" - }, - { - "name": "Quickstart", - "description": "Demonstrates of Cloud Bigtable. This sample creates a Bigtable client, connects to an instance and then to a table, then closes the connection.", - "file": "main.py", - "runnable": true, - "custom_content": "
usage: main.py [-h] [--table TABLE] project_id instance_id 


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Existing table used in the quickstart. (default: my-table)
", - "override_path": "quickstart" - }, - { - "name": "Quickstart using HappyBase", - "description": "Demonstrates of Cloud Bigtable using HappyBase. This sample creates a Bigtable client, connects to an instance and then to a table, then closes the connection.", - "file": "main.py", - "runnable": true, - "custom_content": "
usage: main.py [-h] [--table TABLE] project_id instance_id
usage: main.py [-h] [--table TABLE] project_id instance_id


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Existing table used in the quickstart. (default: my-table)usage: tableadmin.py [-h] [run] [delete] [--table TABLE] project_id instance_id


Demonstrates how to connect to Cloud Bigtable and run some basic operations.
Prerequisites: - Create a Cloud Bigtable cluster.
https://cloud.google.com/bigtable/docs/creating-cluster - Set your Google
Application Default Credentials.
https://developers.google.com/identity/protocols/application-default-
credentials


positional arguments:
  project_id     Your Cloud Platform project ID.
  instance_id    ID of the Cloud Bigtable instance to connect to.


optional arguments:
  -h, --help     show this help message and exit
  --table TABLE  Table to create and destroy. (default: Hello-Bigtable)
", - "override_path": "tableadmin" - } - ], - "default_version": "v2", - "codeowner_team": "@googleapis/api-bigtable @googleapis/api-bigtable-partners", - "api_shortname": "bigtable" -} + "name": "bigtable", + "name_pretty": "Cloud Bigtable", + "product_documentation": "https://cloud.google.com/bigtable", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-bigtable/docs/README.rst b/packages/google-cloud-bigtable/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-bigtable/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigtable/docs/README.rst b/packages/google-cloud-bigtable/docs/README.rst new file mode 100644 index 000000000000..b054a1c2917c --- /dev/null +++ b/packages/google-cloud-bigtable/docs/README.rst @@ -0,0 +1,120 @@ +Python Client for Google Cloud Bigtable +======================================= + +|GA| |pypi| |versions| + +`Google Cloud Bigtable`_ is Google's NoSQL Big Data database service. It's the +same database that powers many core Google services, including Search, +Analytics, Maps, and Gmail. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg + :target: https://pypi.org/project/google-cloud-bigtable/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigtable.svg + :target: https://pypi.org/project/google-cloud-bigtable/ +.. _Google Cloud Bigtable: https://cloud.google.com/bigtable +.. _Client Library Documentation: https://googleapis.dev/python/bigtable/latest +.. 
_Product Documentation: https://cloud.google.com/bigtable/docs + + +Async Data Client +------------------------- + +:code:`v2.23.0` includes a release of the new :code:`BigtableDataClientAsync` client, accessible at the import path +:code:`google.cloud.bigtable.data`. + +The new client brings a simplified API and increased performance using asyncio. +The new client is focused on the data API (i.e. reading and writing Bigtable data), with admin operations +remaining exclusively in the existing synchronous client. + +Feedback and bug reports are welcome at cbt-python-client-v3-feedback@google.com, +or through the Github `issue tracker`_. + + + .. note:: + + It is generally not recommended to use the async client in an otherwise synchronous codebase. To make use of asyncio's + performance benefits, the codebase should be designed to be async from the ground up. + + +.. _issue tracker: https://github.com/googleapis/google-cloud-python/issues + + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Bigtable API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Bigtable API.: https://cloud.google.com/bigtable +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. 
+ +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Python >= 3.7 + +Deprecated Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Python 2.7: the last released version which supported Python 2.7 was + version 1.7.0, released 2021-02-09. + +- Python 3.5: the last released version which supported Python 3.5 was + version 1.7.0, released 2021-02-09. + +- Python 3.6: the last released version which supported Python 3.6 was + version v2.10.1, released 2022-06-03. + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-bigtable + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-bigtable + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Bigtable API + to see other available methods on the client. +- Read the `Product documentation`_ to learn + more about the product and see How-to Guides. 
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py index 7331ff24150c..8b73ec2e424f 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/__init__.py @@ -14,9 +14,8 @@ """Google Cloud Bigtable API package.""" -from google.cloud.bigtable.client import Client - from google.cloud.bigtable import gapic_version as package_version +from google.cloud.bigtable.client import Client __version__: str diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py b/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py index 8cde66146f9a..979463538153 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/app_profile.py @@ -17,10 +17,11 @@ import re +from google.api_core.exceptions import NotFound +from google.protobuf import field_mask_pb2 + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.types import instance -from google.protobuf import field_mask_pb2 -from google.api_core.exceptions import NotFound _APP_PROFILE_NAME_RE = re.compile( r"^projects/(?P[^/]+)/" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py b/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py index f6fa24421f02..ef3c97543818 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/backup.py @@ -17,13 +17,14 @@ import re from google.cloud._helpers import _datetime_to_pb_timestamp # type: ignore -from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient -from google.cloud.bigtable_admin_v2.types import table -from google.cloud.bigtable.encryption_info import EncryptionInfo -from google.cloud.bigtable.policy import Policy from google.cloud.exceptions 
import NotFound # type: ignore from google.protobuf import field_mask_pb2 +from google.cloud.bigtable.encryption_info import EncryptionInfo +from google.cloud.bigtable.policy import Policy +from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient +from google.cloud.bigtable_admin_v2.types import table + _BACKUP_NAME_RE = re.compile( r"^projects/(?P[^/]+)/" r"instances/(?P[a-z][-a-z0-9]*)/" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py index f9b85386d827..bbabb6f8ece4 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/batcher.py @@ -13,15 +13,13 @@ # limitations under the License. """User friendly container for Google Cloud Bigtable MutationBatcher.""" -import threading -import queue -import concurrent.futures import atexit - - -from google.api_core.exceptions import from_grpc_status +import concurrent.futures from dataclasses import dataclass +import queue +import threading +from google.api_core.exceptions import from_grpc_status FLUSH_COUNT = 100 # after this many elements, send out the batch diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/client.py index 37de10b6e772..af5653b3dbf6 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/client.py @@ -29,31 +29,24 @@ """ import os import warnings -import grpc # type: ignore from google.api_core.gapic_v1 import client_info as client_info_lib from google.auth.credentials import AnonymousCredentials # type: ignore +from google.cloud.client import ClientWithProject # type: ignore +from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore +import grpc # type: ignore -from google.cloud import bigtable_v2 -from google.cloud import bigtable_admin_v2 
-from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport +from google.cloud import bigtable, bigtable_admin_v2, bigtable_v2 +from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE, Cluster +from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.transports import ( BigtableInstanceAdminGrpcTransport, ) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports import ( BigtableTableAdminGrpcTransport, ) - -from google.cloud import bigtable -from google.cloud.bigtable.instance import Instance -from google.cloud.bigtable.cluster import Cluster - -from google.cloud.client import ClientWithProject # type: ignore - from google.cloud.bigtable_admin_v2.types import instance -from google.cloud.bigtable.cluster import _CLUSTER_NAME_RE -from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore - +from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport INSTANCE_TYPE_PRODUCTION = instance.Instance.Type.PRODUCTION INSTANCE_TYPE_DEVELOPMENT = instance.Instance.Type.DEVELOPMENT diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py b/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py index 967ec707e1c3..bf91a7f3ac61 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/cluster.py @@ -16,10 +16,11 @@ import re -from google.cloud.bigtable_admin_v2.types import instance + from google.api_core.exceptions import NotFound from google.protobuf import field_mask_pb2 +from google.cloud.bigtable_admin_v2.types import instance _CLUSTER_NAME_RE = re.compile( r"^projects/(?P[^/]+)/" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py b/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py index 80232958d492..31067f8d1985 100644 --- 
a/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/column_family.py @@ -15,12 +15,13 @@ """User friendly container for Google Cloud Bigtable Column Family.""" +from google.api_core.gapic_v1.method import DEFAULT + from google.cloud import _helpers -from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2 from google.cloud.bigtable_admin_v2.types import ( bigtable_table_admin as table_admin_v2_pb2, ) -from google.api_core.gapic_v1.method import DEFAULT +from google.cloud.bigtable_admin_v2.types import table as table_v2_pb2 class GarbageCollectionRule(object): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py index c18eae683461..da82ae2b5aab 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/__init__.py @@ -14,56 +14,52 @@ # limitations under the License. 
# from google.cloud.bigtable import gapic_version as package_version - -from google.cloud.bigtable.data._async.client import BigtableDataClientAsync -from google.cloud.bigtable.data._async.client import TableAsync -from google.cloud.bigtable.data._async.client import AuthorizedViewAsync +from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync +from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync +from google.cloud.bigtable.data._async.client import ( + AuthorizedViewAsync, + BigtableDataClientAsync, + TableAsync, +) from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync -from google.cloud.bigtable.data._sync_autogen.client import BigtableDataClient -from google.cloud.bigtable.data._sync_autogen.client import Table -from google.cloud.bigtable.data._sync_autogen.client import AuthorizedView +from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + TABLE_DEFAULT, + RowKeySamples, + ShardedQuery, +) +from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation +from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation +from google.cloud.bigtable.data._sync_autogen.client import ( + AuthorizedView, + BigtableDataClient, + Table, +) from google.cloud.bigtable.data._sync_autogen.mutations_batcher import MutationsBatcher - -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.cloud.bigtable.data.read_rows_query import RowRange -from google.cloud.bigtable.data.row import Row -from google.cloud.bigtable.data.row import Cell - -from google.cloud.bigtable.data.mutations import Mutation -from google.cloud.bigtable.data.mutations import RowMutationEntry -from google.cloud.bigtable.data.mutations import AddToCell -from google.cloud.bigtable.data.mutations import SetCell -from google.cloud.bigtable.data.mutations import DeleteRangeFromColumn -from 
google.cloud.bigtable.data.mutations import DeleteAllFromFamily -from google.cloud.bigtable.data.mutations import DeleteAllFromRow - -from google.cloud.bigtable.data.exceptions import InvalidChunk -from google.cloud.bigtable.data.exceptions import FailedMutationEntryError -from google.cloud.bigtable.data.exceptions import FailedQueryShardError - -from google.cloud.bigtable.data.exceptions import RetryExceptionGroup -from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup -from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup -from google.cloud.bigtable.data.exceptions import ParameterTypeInferenceFailed - -from google.cloud.bigtable.data._helpers import TABLE_DEFAULT -from google.cloud.bigtable.data._helpers import RowKeySamples -from google.cloud.bigtable.data._helpers import ShardedQuery - -# setup custom CrossSync mappings for library -from google.cloud.bigtable_v2.services.bigtable.async_client import ( - BigtableAsyncClient, +from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + FailedQueryShardError, + InvalidChunk, + MutationsExceptionGroup, + ParameterTypeInferenceFailed, + RetryExceptionGroup, + ShardedReadRowsExceptionGroup, ) -from google.cloud.bigtable.data._async._read_rows import _ReadRowsOperationAsync -from google.cloud.bigtable.data._async._mutate_rows import _MutateRowsOperationAsync - -from google.cloud.bigtable_v2.services.bigtable.client import ( - BigtableClient, +from google.cloud.bigtable.data.mutations import ( + AddToCell, + DeleteAllFromFamily, + DeleteAllFromRow, + DeleteRangeFromColumn, + Mutation, + RowMutationEntry, + SetCell, ) -from google.cloud.bigtable.data._sync_autogen._read_rows import _ReadRowsOperation -from google.cloud.bigtable.data._sync_autogen._mutate_rows import _MutateRowsOperation +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange +from google.cloud.bigtable.data.row import Cell, Row -from 
google.cloud.bigtable.data._cross_sync import CrossSync +# setup custom CrossSync mappings for library +from google.cloud.bigtable_v2.services.bigtable.async_client import BigtableAsyncClient +from google.cloud.bigtable_v2.services.bigtable.client import BigtableClient CrossSync.add_mapping("GapicClient", BigtableAsyncClient) CrossSync._Sync_Impl.add_mapping("GapicClient", BigtableClient) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py index e13c9acb7c0e..395c314c1030 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/__init__.py @@ -12,12 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud.bigtable.data._async.client import BigtableDataClientAsync -from google.cloud.bigtable.data._async.client import TableAsync - +from google.cloud.bigtable.data._async.client import BigtableDataClientAsync, TableAsync from google.cloud.bigtable.data._async.mutations_batcher import MutationsBatcherAsync - __all__ = [ "BigtableDataClientAsync", "TableAsync", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py index 8e6833bcafee..b1044e8792ab 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_mutate_rows.py @@ -14,38 +14,42 @@ # from __future__ import annotations -from typing import Sequence, TYPE_CHECKING +from typing import TYPE_CHECKING, Sequence from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -import google.cloud.bigtable_v2.types.bigtable as types_pb + +from 
google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + _attempt_timeout_generator, + _retry_exception_factory, +) import google.cloud.bigtable.data.exceptions as bt_exceptions -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator -from google.cloud.bigtable.data._helpers import _retry_exception_factory # mutate_rows requests are limited to this number of mutations -from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT -from google.cloud.bigtable.data.mutations import _EntryWithProto - -from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.mutations import ( + _MUTATE_ROWS_REQUEST_MUTATION_LIMIT, + _EntryWithProto, +) +import google.cloud.bigtable_v2.types.bigtable as types_pb if TYPE_CHECKING: from google.cloud.bigtable.data.mutations import RowMutationEntry if CrossSync.is_async: + from google.cloud.bigtable.data._async.client import ( + _DataApiTargetAsync as TargetType, + ) # type: ignore from google.cloud.bigtable_v2.services.bigtable.async_client import ( BigtableAsyncClient as GapicClientType, ) - from google.cloud.bigtable.data._async.client import ( # type: ignore - _DataApiTargetAsync as TargetType, - ) else: - from google.cloud.bigtable_v2.services.bigtable.client import ( # type: ignore - BigtableClient as GapicClientType, - ) - from google.cloud.bigtable.data._sync_autogen.client import ( # type: ignore + from google.cloud.bigtable.data._sync_autogen.client import ( _DataApiTarget as TargetType, - ) + ) # type: ignore + from google.cloud.bigtable_v2.services.bigtable.client import ( + BigtableClient as GapicClientType, + ) # type: ignore __CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._mutate_rows" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py index 
8787bfa71411..30acdbdf12bd 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_read_rows.py @@ -15,25 +15,27 @@ from __future__ import annotations -from typing import Sequence, TYPE_CHECKING - -from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB -from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB -from google.cloud.bigtable_v2.types import RowSet as RowSetPB -from google.cloud.bigtable_v2.types import RowRange as RowRangePB - -from google.cloud.bigtable.data.row import Row, Cell -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.cloud.bigtable.data.exceptions import InvalidChunk -from google.cloud.bigtable.data.exceptions import _RowSetComplete -from google.cloud.bigtable.data.exceptions import _ResetRow -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator -from google.cloud.bigtable.data._helpers import _retry_exception_factory +from typing import TYPE_CHECKING, Sequence from google.api_core import retry as retries from google.api_core.retry import exponential_sleep_generator from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + _attempt_timeout_generator, + _retry_exception_factory, +) +from google.cloud.bigtable.data.exceptions import ( + InvalidChunk, + _ResetRow, + _RowSetComplete, +) +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable.data.row import Cell, Row +from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB +from google.cloud.bigtable_v2.types import RowRange as RowRangePB +from google.cloud.bigtable_v2.types import RowSet as RowSetPB +from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB if TYPE_CHECKING: if CrossSync.is_async: @@ -41,7 +43,9 @@ _DataApiTargetAsync as 
TargetType, ) else: - from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget as TargetType # type: ignore + from google.cloud.bigtable.data._sync_autogen.client import ( + _DataApiTarget as TargetType, + ) # type: ignore __CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen._read_rows" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py index bbc9a0d47ec1..d65f833a8f9d 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/_swappable_channel.py @@ -16,10 +16,10 @@ from typing import Callable -from google.cloud.bigtable.data._cross_sync import CrossSync - from grpc import ChannelConnectivity +from google.cloud.bigtable.data._cross_sync import CrossSync + if CrossSync.is_async: from grpc.aio import Channel else: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py index f86c886f032f..fc8653b0e8ba 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py @@ -15,115 +15,126 @@ from __future__ import annotations +import abc +import concurrent.futures +from functools import partial +import os +import random +import time from typing import ( - cast, + TYPE_CHECKING, Any, AsyncIterable, Callable, Optional, - Set, Sequence, - TYPE_CHECKING, + Set, + cast, ) - -import abc -import time import warnings -import random -import os -import concurrent.futures -from functools import partial +from google.api_core import client_options as client_options_lib +from google.api_core import retry as retries +from google.api_core.exceptions import ( + Aborted, + Cancelled, + DeadlineExceeded, 
+ ServiceUnavailable, +) +import google.auth._default +import google.auth.credentials +from google.cloud.client import ClientWithProject +from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore +from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper +from google.protobuf.message import Message from grpc import Channel -from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType -from google.cloud.bigtable.data.execute_query.metadata import ( - SqlType, - _pb_metadata_to_metadata_types, +from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT +from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + _CONCURRENCY_LIMIT, + TABLE_DEFAULT, + _align_timeouts, + _attempt_timeout_generator, + _get_error_type, + _get_retryable_errors, + _get_timeouts, + _retry_exception_factory, + _validate_timeouts, + _WarmedInstanceKey, +) +from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController +from google.cloud.bigtable.data.exceptions import ( + FailedQueryShardError, + ShardedReadRowsExceptionGroup, ) from google.cloud.bigtable.data.execute_query._parameters_formatting import ( _format_execute_query_params, _to_param_types, ) -from google.cloud.bigtable_v2.services.bigtable.transports.base import ( - DEFAULT_CLIENT_INFO, +from google.cloud.bigtable.data.execute_query.metadata import ( + SqlType, + _pb_metadata_to_metadata_types, ) -from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest -from google.cloud.bigtable_v2.types.bigtable import SampleRowKeysRequest -from google.cloud.bigtable_v2.types.bigtable import MutateRowRequest -from google.cloud.bigtable_v2.types.bigtable import CheckAndMutateRowRequest -from google.cloud.bigtable_v2.types.bigtable import ReadModifyWriteRowRequest -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import BIGTABLE_EMULATOR # type: ignore 
-from google.api_core import retry as retries -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import ServiceUnavailable -from google.api_core.exceptions import Aborted -from google.api_core.exceptions import Cancelled -from google.protobuf.message import Message -from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper - -import google.auth.credentials -import google.auth._default -from google.api_core import client_options as client_options_lib -from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT -from google.cloud.bigtable.data.row import Row -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.cloud.bigtable.data.exceptions import FailedQueryShardError -from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - -from google.cloud.bigtable.data._helpers import TABLE_DEFAULT, _align_timeouts -from google.cloud.bigtable.data._helpers import _WarmedInstanceKey -from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT -from google.cloud.bigtable.data._helpers import _retry_exception_factory -from google.cloud.bigtable.data._helpers import _validate_timeouts -from google.cloud.bigtable.data._helpers import _get_error_type -from google.cloud.bigtable.data._helpers import _get_retryable_errors -from google.cloud.bigtable.data._helpers import _get_timeouts -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator +from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType from google.cloud.bigtable.data.mutations import Mutation, RowMutationEntry - from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule -from google.cloud.bigtable.data.row_filters import RowFilter -from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter -from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter -from google.cloud.bigtable.data.row_filters import 
RowFilterChain -from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController - -from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable.data.row import Row +from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilter, + RowFilterChain, + StripValueTransformerFilter, +) +from google.cloud.bigtable_v2.services.bigtable.transports.base import ( + DEFAULT_CLIENT_INFO, +) +from google.cloud.bigtable_v2.types.bigtable import ( + CheckAndMutateRowRequest, + MutateRowRequest, + PingAndWarmRequest, + ReadModifyWriteRowRequest, + SampleRowKeysRequest, +) if CrossSync.is_async: from grpc.aio import insecure_channel - from google.cloud.bigtable_v2.services.bigtable.transports import ( - BigtableGrpcAsyncIOTransport as TransportType, - ) - from google.cloud.bigtable_v2.services.bigtable import ( - BigtableAsyncClient as GapicClient, - ) - from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE + from google.cloud.bigtable.data._async._swappable_channel import ( AsyncSwappableChannel as SwappableChannelType, ) from google.cloud.bigtable.data._async.metrics_interceptor import ( AsyncBigtableMetricsInterceptor as MetricsInterceptorType, ) + from google.cloud.bigtable.data._async.mutations_batcher import _MB_SIZE + from google.cloud.bigtable_v2.services.bigtable import ( + BigtableAsyncClient as GapicClient, + ) + from google.cloud.bigtable_v2.services.bigtable.transports import ( + BigtableGrpcAsyncIOTransport as TransportType, + ) else: from typing import Iterable # noqa: F401 - from grpc import insecure_channel - from grpc import intercept_channel - from google.cloud.bigtable_v2.services.bigtable.transports import BigtableGrpcTransport as TransportType # type: ignore - from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient # type: ignore - from 
google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE - from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( # noqa: F401 + + from grpc import insecure_channel, intercept_channel + + from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( SwappableChannel as SwappableChannelType, - ) - from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( # noqa: F401 + ) # noqa: F401 + from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( BigtableMetricsInterceptor as MetricsInterceptorType, - ) + ) # noqa: F401 + from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE + from google.cloud.bigtable_v2.services.bigtable import ( + BigtableClient as GapicClient, + ) # type: ignore + from google.cloud.bigtable_v2.services.bigtable.transports import ( + BigtableGrpcTransport as TransportType, + ) # type: ignore if TYPE_CHECKING: - from google.cloud.bigtable.data._helpers import RowKeySamples - from google.cloud.bigtable.data._helpers import ShardedQuery + from google.cloud.bigtable.data._helpers import RowKeySamples, ShardedQuery if CrossSync.is_async: from google.cloud.bigtable.data._async.mutations_batcher import ( @@ -133,12 +144,12 @@ ExecuteQueryIteratorAsync, ) else: - from google.cloud.bigtable.data._sync_autogen.mutations_batcher import ( # noqa: F401 + from google.cloud.bigtable.data._sync_autogen.mutations_batcher import ( MutationsBatcher, - ) - from google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator import ( # noqa: F401 + ) # noqa: F401 + from google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator import ( ExecuteQueryIterator, - ) + ) # noqa: F401 __CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.client" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py 
b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py index 249dcdcc97c4..43d25b496941 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/metrics_interceptor.py @@ -13,24 +13,25 @@ # limitations under the License. from __future__ import annotations -from typing import Sequence - -import time from functools import wraps - -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import OperationState -from google.cloud.bigtable.data._metrics.data_model import OperationType +import time +from typing import Sequence from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + OperationState, + OperationType, +) if CrossSync.is_async: - from grpc.aio import UnaryUnaryClientInterceptor - from grpc.aio import UnaryStreamClientInterceptor - from grpc.aio import AioRpcError + from grpc.aio import ( + AioRpcError, + UnaryStreamClientInterceptor, + UnaryUnaryClientInterceptor, + ) else: - from grpc import UnaryUnaryClientInterceptor - from grpc import UnaryStreamClientInterceptor + from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor __CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.metrics_interceptor" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py index a8e99ea9e91b..e896def6a01e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/mutations_batcher.py @@ -14,24 +14,26 @@ # from __future__ import annotations -from typing import Sequence, TYPE_CHECKING, cast import 
atexit -import warnings from collections import deque import concurrent.futures +from typing import TYPE_CHECKING, Sequence, cast +import warnings -from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup -from google.cloud.bigtable.data.exceptions import FailedMutationEntryError -from google.cloud.bigtable.data._helpers import _get_retryable_errors -from google.cloud.bigtable.data._helpers import _get_timeouts -from google.cloud.bigtable.data._helpers import TABLE_DEFAULT - +from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + TABLE_DEFAULT, + _get_retryable_errors, + _get_timeouts, +) +from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, +) from google.cloud.bigtable.data.mutations import ( _MUTATE_ROWS_REQUEST_MUTATION_LIMIT, + Mutation, ) -from google.cloud.bigtable.data.mutations import Mutation - -from google.cloud.bigtable.data._cross_sync import CrossSync if TYPE_CHECKING: from google.cloud.bigtable.data.mutations import RowMutationEntry @@ -41,7 +43,9 @@ _DataApiTargetAsync as TargetType, ) else: - from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget as TargetType # type: ignore + from google.cloud.bigtable.data._sync_autogen.client import ( + _DataApiTarget as TargetType, + ) # type: ignore __CROSS_SYNC_OUTPUT__ = "google.cloud.bigtable.data._sync_autogen.mutations_batcher" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py index 77a9ddae9d38..a392baa16709 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/__init__.py @@ -14,7 +14,6 @@ from .cross_sync import CrossSync - __all__ = [ "CrossSync", ] diff --git 
a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py index a0dd140dd01d..90c7aca05dc7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_decorators.py @@ -16,11 +16,12 @@ Each AstDecorator class is used through @CrossSync. """ from __future__ import annotations + from typing import TYPE_CHECKING, Iterable if TYPE_CHECKING: import ast - from typing import Callable, Any + from typing import Any, Callable class AstDecorator: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py index 5312708ccc46..4e9324d79a59 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/_mapping_meta.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from __future__ import annotations + from typing import Any diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py index 1f1ee111aee9..4e97b80bdc27 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_cross_sync/cross_sync.py @@ -38,34 +38,29 @@ async def async_func(self, arg: int) -> int: from __future__ import annotations +import asyncio +import concurrent.futures +import queue +import sys +import threading +import time +import typing from typing import ( - TypeVar, + TYPE_CHECKING, Any, + AsyncGenerator, + AsyncIterable, + AsyncIterator, Callable, Coroutine, Sequence, + TypeVar, Union, - AsyncIterable, - AsyncIterator, - AsyncGenerator, - TYPE_CHECKING, ) -import typing -import asyncio -import sys -import concurrent.futures import google.api_core.retry as retries -import queue -import threading -import time -from ._decorators import ( - ConvertClass, - Convert, - Drop, - Pytest, - PytestFixture, -) + +from ._decorators import Convert, ConvertClass, Drop, Pytest, PytestFixture from ._mapping_meta import MappingMeta if TYPE_CHECKING: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py index e848ebc6f1dc..595dc85b06e9 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_helpers.py @@ -16,19 +16,20 @@ """ from __future__ import annotations -from typing import Sequence, List, Tuple, TYPE_CHECKING, Union -import time -import enum from collections import namedtuple -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +import enum +import time +from typing import TYPE_CHECKING, List, Sequence, Tuple, Union from google.api_core 
import exceptions as core_exceptions -from google.api_core.retry import exponential_sleep_generator -from google.api_core.retry import RetryFailureReason +from google.api_core.retry import RetryFailureReason, exponential_sleep_generator + from google.cloud.bigtable.data.exceptions import RetryExceptionGroup +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery if TYPE_CHECKING: import grpc + from google.cloud.bigtable.data._async.client import _DataApiTargetAsync from google.cloud.bigtable.data._sync_autogen.client import _DataApiTarget diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py index 26cfc1326bbe..77b8580bc524 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/__init__.py @@ -11,16 +11,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from google.cloud.bigtable.data._metrics.data_model import ( + ActiveAttemptMetric, + ActiveOperationMetric, + CompletedAttemptMetric, + CompletedOperationMetric, + OperationState, + OperationType, +) from google.cloud.bigtable.data._metrics.metrics_controller import ( BigtableClientSideMetricsController, ) - -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import ActiveAttemptMetric -from google.cloud.bigtable.data._metrics.data_model import CompletedOperationMetric -from google.cloud.bigtable.data._metrics.data_model import CompletedAttemptMetric -from google.cloud.bigtable.data._metrics.data_model import OperationState -from google.cloud.bigtable.data._metrics.data_model import OperationType from google.cloud.bigtable.data._metrics.tracked_retry import tracked_retry __all__ = ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py index 64dd63bfa32e..c31f1a479e17 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/data_model.py @@ -13,25 +13,22 @@ # limitations under the License. 
from __future__ import annotations -from typing import ClassVar, Tuple, cast, TYPE_CHECKING - -import time -import re -import logging import contextvars - +from dataclasses import dataclass, field from enum import Enum from functools import lru_cache -from dataclasses import dataclass -from dataclasses import field -from grpc import StatusCode -from grpc import RpcError +import logging +import re +import time +from typing import TYPE_CHECKING, ClassVar, Tuple, cast + +from google.protobuf.message import DecodeError +from grpc import RpcError, StatusCode from grpc.aio import AioRpcError +from google.cloud.bigtable.data._helpers import TrackedBackoffGenerator import google.cloud.bigtable.data.exceptions as bt_exceptions from google.cloud.bigtable_v2.types.response_params import ResponseParams -from google.cloud.bigtable.data._helpers import TrackedBackoffGenerator -from google.protobuf.message import DecodeError if TYPE_CHECKING: from google.cloud.bigtable.data._metrics.handlers._base import MetricsHandler diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py index 884091fddf54..020e3943d23f 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/handlers/_base.py @@ -11,9 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import CompletedAttemptMetric -from google.cloud.bigtable.data._metrics.data_model import CompletedOperationMetric +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + CompletedAttemptMetric, + CompletedOperationMetric, +) class MetricsHandler: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py index e9815f201930..2ee1d9eb07e8 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/metrics_controller.py @@ -13,9 +13,11 @@ # limitations under the License. from __future__ import annotations -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + OperationType, +) from google.cloud.bigtable.data._metrics.handlers._base import MetricsHandler -from google.cloud.bigtable.data._metrics.data_model import OperationType class BigtableClientSideMetricsController: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py index 94d2e5dcb66d..ed2bc8174d32 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_metrics/tracked_retry.py @@ -23,14 +23,13 @@ from typing import Callable, List, Optional, Tuple, TypeVar -from grpc import StatusCode from google.api_core.exceptions import GoogleAPICallError from google.api_core.retry import RetryFailureReason -from google.cloud.bigtable.data.exceptions import 
_MutateRowsIncomplete -from google.cloud.bigtable.data._helpers import _retry_exception_factory -from google.cloud.bigtable.data._metrics import ActiveOperationMetric -from google.cloud.bigtable.data._metrics import OperationState +from grpc import StatusCode +from google.cloud.bigtable.data._helpers import _retry_exception_factory +from google.cloud.bigtable.data._metrics import ActiveOperationMetric, OperationState +from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete T = TypeVar("T") diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py index 3bf7b562f1db..48b7f40a0ae4 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_mutate_rows.py @@ -16,25 +16,32 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import Sequence, TYPE_CHECKING + +from typing import TYPE_CHECKING, Sequence + from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -import google.cloud.bigtable_v2.types.bigtable as types_pb -import google.cloud.bigtable.data.exceptions as bt_exceptions -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator -from google.cloud.bigtable.data._helpers import _retry_exception_factory -from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT -from google.cloud.bigtable.data.mutations import _EntryWithProto + from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + _attempt_timeout_generator, + _retry_exception_factory, +) +import google.cloud.bigtable.data.exceptions as bt_exceptions +from google.cloud.bigtable.data.mutations import ( + _MUTATE_ROWS_REQUEST_MUTATION_LIMIT, + _EntryWithProto, +) +import google.cloud.bigtable_v2.types.bigtable as types_pb if TYPE_CHECKING: + from google.cloud.bigtable.data._sync_autogen.client import ( + _DataApiTarget as TargetType, + ) from google.cloud.bigtable.data.mutations import RowMutationEntry from google.cloud.bigtable_v2.services.bigtable.client import ( BigtableClient as GapicClientType, ) - from google.cloud.bigtable.data._sync_autogen.client import ( - _DataApiTarget as TargetType, - ) class _MutateRowsOperation: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py index 3593475a98d2..822db2c9eaa7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_read_rows.py @@ -17,21 +17,28 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import Sequence, TYPE_CHECKING -from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB -from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB -from google.cloud.bigtable_v2.types import RowSet as RowSetPB -from google.cloud.bigtable_v2.types import RowRange as RowRangePB -from google.cloud.bigtable.data.row import Row, Cell -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.cloud.bigtable.data.exceptions import InvalidChunk -from google.cloud.bigtable.data.exceptions import _RowSetComplete -from google.cloud.bigtable.data.exceptions import _ResetRow -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator -from google.cloud.bigtable.data._helpers import _retry_exception_factory + +from typing import TYPE_CHECKING, Sequence + from google.api_core import retry as retries from google.api_core.retry import exponential_sleep_generator + from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + _attempt_timeout_generator, + _retry_exception_factory, +) +from google.cloud.bigtable.data.exceptions import ( + InvalidChunk, + _ResetRow, + _RowSetComplete, +) +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable.data.row import Cell, Row +from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB +from google.cloud.bigtable_v2.types import RowRange as RowRangePB +from google.cloud.bigtable_v2.types import RowSet as RowSetPB +from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB if TYPE_CHECKING: from google.cloud.bigtable.data._sync_autogen.client import ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_swappable_channel.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_swappable_channel.py index 
78ba129d98c5..93f2b44a6df6 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_swappable_channel.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/_swappable_channel.py @@ -16,9 +16,10 @@ # This file is automatically generated by CrossSync. Do not edit manually. from __future__ import annotations + from typing import Callable -from grpc import ChannelConnectivity -from grpc import Channel + +from grpc import Channel, ChannelConnectivity class _WrappedChannel(Channel): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py index 62200276380a..a90d5c9c8a8c 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/client.py @@ -17,84 +17,94 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import cast, Any, Callable, Optional, Set, Sequence, TYPE_CHECKING + import abc -import time -import warnings -import random -import os import concurrent.futures from functools import partial -from grpc import Channel -from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType -from google.cloud.bigtable.data.execute_query.metadata import ( - SqlType, - _pb_metadata_to_metadata_types, -) -from google.cloud.bigtable.data.execute_query._parameters_formatting import ( - _format_execute_query_params, - _to_param_types, -) -from google.cloud.bigtable_v2.services.bigtable.transports.base import ( - DEFAULT_CLIENT_INFO, +import os +import random +import time +from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Sequence, Set, cast +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import retry as retries +from google.api_core.exceptions import ( + Aborted, + Cancelled, + DeadlineExceeded, + ServiceUnavailable, ) -from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest -from google.cloud.bigtable_v2.types.bigtable import SampleRowKeysRequest -from google.cloud.bigtable_v2.types.bigtable import MutateRowRequest -from google.cloud.bigtable_v2.types.bigtable import CheckAndMutateRowRequest -from google.cloud.bigtable_v2.types.bigtable import ReadModifyWriteRowRequest +import google.auth._default +import google.auth.credentials from google.cloud.client import ClientWithProject from google.cloud.environment_vars import BIGTABLE_EMULATOR -from google.api_core import retry as retries -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import ServiceUnavailable -from google.api_core.exceptions import Aborted -from google.api_core.exceptions import Cancelled -from google.protobuf.message import Message from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper -import 
google.auth.credentials -import google.auth._default -from google.api_core import client_options as client_options_lib +from google.protobuf.message import Message +from grpc import Channel, insecure_channel, intercept_channel + from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT -from google.cloud.bigtable.data.row import Row -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.cloud.bigtable.data.exceptions import FailedQueryShardError -from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup -from google.cloud.bigtable.data._helpers import TABLE_DEFAULT, _align_timeouts -from google.cloud.bigtable.data._helpers import _WarmedInstanceKey -from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT -from google.cloud.bigtable.data._helpers import _retry_exception_factory -from google.cloud.bigtable.data._helpers import _validate_timeouts -from google.cloud.bigtable.data._helpers import _get_error_type -from google.cloud.bigtable.data._helpers import _get_retryable_errors -from google.cloud.bigtable.data._helpers import _get_timeouts -from google.cloud.bigtable.data._helpers import _attempt_timeout_generator -from google.cloud.bigtable.data.mutations import Mutation, RowMutationEntry -from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule -from google.cloud.bigtable.data.row_filters import RowFilter -from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter -from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter -from google.cloud.bigtable.data.row_filters import RowFilterChain -from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController from google.cloud.bigtable.data._cross_sync import CrossSync -from typing import Iterable -from grpc import insecure_channel -from grpc import intercept_channel -from google.cloud.bigtable_v2.services.bigtable.transports import ( - BigtableGrpcTransport as 
TransportType, +from google.cloud.bigtable.data._helpers import ( + _CONCURRENCY_LIMIT, + TABLE_DEFAULT, + _align_timeouts, + _attempt_timeout_generator, + _get_error_type, + _get_retryable_errors, + _get_timeouts, + _retry_exception_factory, + _validate_timeouts, + _WarmedInstanceKey, ) -from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient -from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE +from google.cloud.bigtable.data._metrics import BigtableClientSideMetricsController from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( SwappableChannel as SwappableChannelType, ) from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( BigtableMetricsInterceptor as MetricsInterceptorType, ) +from google.cloud.bigtable.data._sync_autogen.mutations_batcher import _MB_SIZE +from google.cloud.bigtable.data.exceptions import ( + FailedQueryShardError, + ShardedReadRowsExceptionGroup, +) +from google.cloud.bigtable.data.execute_query._parameters_formatting import ( + _format_execute_query_params, + _to_param_types, +) +from google.cloud.bigtable.data.execute_query.metadata import ( + SqlType, + _pb_metadata_to_metadata_types, +) +from google.cloud.bigtable.data.execute_query.values import ExecuteQueryValueType +from google.cloud.bigtable.data.mutations import Mutation, RowMutationEntry +from google.cloud.bigtable.data.read_modify_write_rules import ReadModifyWriteRule +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable.data.row import Row +from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilter, + RowFilterChain, + StripValueTransformerFilter, +) +from google.cloud.bigtable_v2.services.bigtable import BigtableClient as GapicClient +from google.cloud.bigtable_v2.services.bigtable.transports import ( + BigtableGrpcTransport as TransportType, +) +from google.cloud.bigtable_v2.services.bigtable.transports.base 
import ( + DEFAULT_CLIENT_INFO, +) +from google.cloud.bigtable_v2.types.bigtable import ( + CheckAndMutateRowRequest, + MutateRowRequest, + PingAndWarmRequest, + ReadModifyWriteRowRequest, + SampleRowKeysRequest, +) if TYPE_CHECKING: - from google.cloud.bigtable.data._helpers import RowKeySamples - from google.cloud.bigtable.data._helpers import ShardedQuery + from google.cloud.bigtable.data._helpers import RowKeySamples, ShardedQuery from google.cloud.bigtable.data._sync_autogen.mutations_batcher import ( MutationsBatcher, ) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py index c5a59787c0fd..4ed886a48866 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/metrics_interceptor.py @@ -15,14 +15,18 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import Sequence -import time + from functools import wraps -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import OperationState -from google.cloud.bigtable.data._metrics.data_model import OperationType -from grpc import UnaryUnaryClientInterceptor -from grpc import UnaryStreamClientInterceptor +import time +from typing import Sequence + +from grpc import UnaryStreamClientInterceptor, UnaryUnaryClientInterceptor + +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + OperationState, + OperationType, +) def _with_active_operation(func): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py index 84f0ba8c0618..5e631867c858 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/_sync_autogen/mutations_batcher.py @@ -16,25 +16,33 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import Sequence, TYPE_CHECKING, cast + import atexit -import warnings from collections import deque import concurrent.futures -from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup -from google.cloud.bigtable.data.exceptions import FailedMutationEntryError -from google.cloud.bigtable.data._helpers import _get_retryable_errors -from google.cloud.bigtable.data._helpers import _get_timeouts -from google.cloud.bigtable.data._helpers import TABLE_DEFAULT -from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT -from google.cloud.bigtable.data.mutations import Mutation +from typing import TYPE_CHECKING, Sequence, cast +import warnings + from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._helpers import ( + TABLE_DEFAULT, + _get_retryable_errors, + _get_timeouts, +) +from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, +) +from google.cloud.bigtable.data.mutations import ( + _MUTATE_ROWS_REQUEST_MUTATION_LIMIT, + Mutation, +) if TYPE_CHECKING: - from google.cloud.bigtable.data.mutations import RowMutationEntry from google.cloud.bigtable.data._sync_autogen.client import ( _DataApiTarget as TargetType, ) + from google.cloud.bigtable.data.mutations import RowMutationEntry _MB_SIZE = 1024 * 1024 diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py index b19e0e5ea126..bfc1a9eadddd 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/exceptions.py @@ -15,10 +15,10 @@ from __future__ import annotations import sys - -from typing import Any, TYPE_CHECKING +from typing import TYPE_CHECKING, Any from google.api_core import exceptions as core_exceptions + from google.cloud.bigtable.data.row 
import Row is_311_plus = sys.version_info >= (3, 11) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py index 029e79b9390a..56edd9e160f0 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/__init__.py @@ -12,22 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data.execute_query._async.execute_query_iterator import ( ExecuteQueryIteratorAsync, ) from google.cloud.bigtable.data.execute_query._sync_autogen.execute_query_iterator import ( ExecuteQueryIterator, ) -from google.cloud.bigtable.data.execute_query.metadata import ( - Metadata, - SqlType, -) +from google.cloud.bigtable.data.execute_query.metadata import Metadata, SqlType from google.cloud.bigtable.data.execute_query.values import ( ExecuteQueryValueType, QueryResultRow, Struct, ) -from google.cloud.bigtable.data._cross_sync import CrossSync CrossSync.add_mapping("ExecuteQueryIterator", ExecuteQueryIteratorAsync) CrossSync._Sync_Impl.add_mapping("ExecuteQueryIterator", ExecuteQueryIterator) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py index 2beda4cd65be..e4e88ce9d7b6 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py @@ -14,19 +14,13 @@ from __future__ import annotations -from typing import ( - Any, - Dict, - Optional, - Sequence, - Tuple, - TYPE_CHECKING, -) 
+from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Tuple + from google.api_core import retry as retries -from google.protobuf.message import Message from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper +from google.protobuf.message import Message -from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data._helpers import ( _attempt_timeout_generator, _retry_exception_factory, @@ -35,19 +29,18 @@ EarlyMetadataCallError, InvalidExecuteQueryResponse, ) -from google.cloud.bigtable.data.execute_query.values import QueryResultRow -from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor from google.cloud.bigtable.data.execute_query._reader import ( _QueryResultRowReader, _Reader, ) +from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query.values import QueryResultRow +from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse from google.cloud.bigtable_v2.types.bigtable import ( ExecuteQueryRequest as ExecuteQueryRequestPB, - ExecuteQueryResponse, ) -from google.cloud.bigtable.data._cross_sync import CrossSync - if TYPE_CHECKING: if CrossSync.is_async: from google.cloud.bigtable.data import BigtableDataClientAsync as DataClientType diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py index a43539e55de0..b7eb1e9e9163 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_query_result_parsing_utils.py @@ -13,14 +13,15 @@ # limitations under the License. 
from __future__ import annotations -from typing import Any, Callable, Dict, Type, Optional, Union +from typing import Any, Callable, Dict, Optional, Type, Union -from google.protobuf.message import Message +from google.api_core.datetime_helpers import DatetimeWithNanoseconds from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper -from google.cloud.bigtable.data.execute_query.values import Struct +from google.protobuf.message import Message + from google.cloud.bigtable.data.execute_query.metadata import SqlType +from google.cloud.bigtable.data.execute_query.values import Struct from google.cloud.bigtable_v2 import Value as PBValue -from google.api_core.datetime_helpers import DatetimeWithNanoseconds _REQUIRED_PROTO_FIELDS = { SqlType.Bytes: "bytes_value", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py index 467c2030fe67..b649136b5199 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_reader.py @@ -13,29 +13,20 @@ # limitations under the License. 
from __future__ import annotations -from typing import ( - List, - TypeVar, - Generic, - Iterable, - Optional, - Sequence, -) from abc import ABC, abstractmethod -from google.protobuf.message import Message -from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper +from typing import Generic, Iterable, List, Optional, Sequence, TypeVar -from google.cloud.bigtable_v2 import ProtoRows, Value as PBValue +from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper +from google.protobuf.message import Message from google.cloud.bigtable.data.execute_query._query_result_parsing_utils import ( _parse_pb_value_to_python_value, ) - -from google.cloud.bigtable.helpers import batched - -from google.cloud.bigtable.data.execute_query.values import QueryResultRow from google.cloud.bigtable.data.execute_query.metadata import Metadata - +from google.cloud.bigtable.data.execute_query.values import QueryResultRow +from google.cloud.bigtable.helpers import batched +from google.cloud.bigtable_v2 import ProtoRows +from google.cloud.bigtable_v2 import Value as PBValue T = TypeVar("T") diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py index 68594d0e867a..2675892a9f76 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_sync_autogen/execute_query_iterator.py @@ -16,11 +16,14 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
from __future__ import annotations -from typing import Any, Dict, Optional, Sequence, Tuple, TYPE_CHECKING + +from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Tuple + from google.api_core import retry as retries -from google.protobuf.message import Message from google.protobuf.internal.enum_type_wrapper import EnumTypeWrapper -from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor +from google.protobuf.message import Message + +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data._helpers import ( _attempt_timeout_generator, _retry_exception_factory, @@ -29,17 +32,17 @@ EarlyMetadataCallError, InvalidExecuteQueryResponse, ) -from google.cloud.bigtable.data.execute_query.values import QueryResultRow -from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor from google.cloud.bigtable.data.execute_query._reader import ( _QueryResultRowReader, _Reader, ) +from google.cloud.bigtable.data.execute_query.metadata import Metadata +from google.cloud.bigtable.data.execute_query.values import QueryResultRow +from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse from google.cloud.bigtable_v2.types.bigtable import ( ExecuteQueryRequest as ExecuteQueryRequestPB, - ExecuteQueryResponse, ) -from google.cloud.bigtable.data._cross_sync import CrossSync if TYPE_CHECKING: from google.cloud.bigtable.data import BigtableDataClient as DataClientType diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py index 80a0bff6f7b9..37b7def8c62a 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py @@ -13,19 +13,10 @@ # limitations under the License. 
from collections import defaultdict -from typing import ( - Optional, - List, - Dict, - Set, - Union, - TypeVar, - Generic, - Tuple, - Mapping, -) -from google.type import date_pb2 # type: ignore +from typing import Dict, Generic, List, Mapping, Optional, Set, Tuple, TypeVar, Union + from google.api_core.datetime_helpers import DatetimeWithNanoseconds +from google.type import date_pb2 # type: ignore T = TypeVar("T") diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py index f19b1e49e862..d85a3917b95d 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/mutations.py @@ -13,18 +13,17 @@ # limitations under the License. # from __future__ import annotations -from typing import Any -import time -from dataclasses import dataclass + from abc import ABC, abstractmethod +from dataclasses import dataclass from sys import getsizeof +import time +from typing import Any +from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE import google.cloud.bigtable_v2.types.bigtable as types_pb import google.cloud.bigtable_v2.types.data as data_pb -from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE - - # special value for SetCell mutation timestamps. If set, server will assign a timestamp _SERVER_SIDE_TIMESTAMP = -1 diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py index 7652bfbb9af7..9fd42e2d871a 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/read_rows_query.py @@ -13,19 +13,18 @@ # limitations under the License. 
# from __future__ import annotations -from typing import TYPE_CHECKING, Any -from bisect import bisect_left -from bisect import bisect_right + +from bisect import bisect_left, bisect_right from collections import defaultdict -from google.cloud.bigtable.data.row_filters import RowFilter +from typing import TYPE_CHECKING, Any +from google.cloud.bigtable.data.row_filters import RowFilter +from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB from google.cloud.bigtable_v2.types import RowRange as RowRangePB from google.cloud.bigtable_v2.types import RowSet as RowSetPB -from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB if TYPE_CHECKING: - from google.cloud.bigtable.data import RowKeySamples - from google.cloud.bigtable.data import ShardedQuery + from google.cloud.bigtable.data import RowKeySamples, ShardedQuery class RowRange: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py index 50e65a958c51..44027c5e2f2f 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row.py @@ -15,8 +15,8 @@ from __future__ import annotations from collections import OrderedDict -from typing import Generator, overload, Any from functools import total_ordering +from typing import Any, Generator, overload from google.cloud.bigtable_v2.types import Row as RowPB diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py index 9f09133d533d..bed8ed713cef 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/data/row_filters.py @@ -15,13 +15,13 @@ """Filters for Google Cloud Bigtable Row classes.""" from __future__ import annotations -import struct - -from typing import Any, 
Sequence, TYPE_CHECKING, overload from abc import ABC, abstractmethod +import struct +from typing import TYPE_CHECKING, Any, Sequence, overload from google.cloud._helpers import _microseconds_from_datetime # type: ignore from google.cloud._helpers import _to_bytes # type: ignore + from google.cloud.bigtable_v2.types import data as data_v2_pb2 if TYPE_CHECKING: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py b/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py index 327b2f828c3b..2fa55da8056c 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/enums.py @@ -13,9 +13,7 @@ # limitations under the License. """Wrappers for gapic enum types.""" -from google.cloud.bigtable_admin_v2.types import common -from google.cloud.bigtable_admin_v2.types import instance -from google.cloud.bigtable_admin_v2.types import table +from google.cloud.bigtable_admin_v2.types import common, instance, table class StorageType(object): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py b/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py index 78af430892fc..6bc423f50439 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/helpers.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TypeVar, Iterable, Generator, Tuple - from itertools import islice +from typing import Generator, Iterable, Tuple, TypeVar T = TypeVar("T") diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py b/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py index 23fb1c95dece..6fe6defcd963 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/instance.py @@ -15,24 +15,18 @@ """User-friendly container for Google Cloud Bigtable Instance.""" import re +import warnings + +from google.api_core.exceptions import NotFound +from google.iam.v1 import options_pb2 # type: ignore +from google.protobuf import field_mask_pb2 from google.cloud.bigtable.app_profile import AppProfile from google.cloud.bigtable.cluster import Cluster +from google.cloud.bigtable.policy import Policy from google.cloud.bigtable.table import Table - -from google.protobuf import field_mask_pb2 - from google.cloud.bigtable_admin_v2.types import instance -from google.iam.v1 import options_pb2 # type: ignore - -from google.api_core.exceptions import NotFound - -from google.cloud.bigtable.policy import Policy - -import warnings - - _INSTANCE_NAME_RE = re.compile( r"^projects/(?P[^/]+)/" r"instances/(?P[a-z][-a-z0-9]*)$" ) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row.py index 752458a08a79..165d961ec2c7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/row.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row.py @@ -20,8 +20,8 @@ from google.cloud._helpers import _datetime_from_microseconds # type: ignore from google.cloud._helpers import _microseconds_from_datetime # type: ignore from google.cloud._helpers import _to_bytes # type: ignore -from google.cloud.bigtable_v2.types import data as data_v2_pb2 +from google.cloud.bigtable_v2.types import data as data_v2_pb2 
_PACK_I64 = struct.Struct(">q").pack diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py index e11379108c4f..7aaab18871a8 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_data.py @@ -16,18 +16,16 @@ import copy - -import grpc # type: ignore import warnings -from google.api_core import exceptions -from google.api_core import retry + +from google.api_core import exceptions, retry from google.cloud._helpers import _to_bytes # type: ignore +import grpc # type: ignore +from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData from google.cloud.bigtable.row_merger import _RowMerger, _State from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 from google.cloud.bigtable_v2.types import data as data_v2_pb2 -from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData - # Some classes need to be re-exported here to keep backwards # compatibility. 
Those classes were moved to row_merger, but we dont want to diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py index 53192acc86d0..e1301d01a9f3 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_filters.py @@ -16,9 +16,9 @@ import struct - from google.cloud._helpers import _microseconds_from_datetime # type: ignore from google.cloud._helpers import _to_bytes # type: ignore + from google.cloud.bigtable_v2.types import data as data_v2_pb2 _PACK_I64 = struct.Struct(">q").pack diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py b/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py index 515b91df7ef2..e6be9884d0f4 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/row_merger.py @@ -1,6 +1,7 @@ -from enum import Enum from collections import OrderedDict -from google.cloud.bigtable.row import Cell, PartialRowData, InvalidChunk +from enum import Enum + +from google.cloud.bigtable.row import Cell, InvalidChunk, PartialRowData _MISSING_COLUMN_FAMILY = "Column family {} is not among the cells stored in this row." 
_MISSING_COLUMN = ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable/table.py b/packages/google-cloud-bigtable/google/cloud/bigtable/table.py index 0009f287ef85..dc3caadb3b68 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable/table.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable/table.py @@ -18,40 +18,41 @@ import warnings from google.api_core import timeout -from google.api_core.exceptions import Aborted -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import NotFound -from google.api_core.exceptions import RetryError -from google.api_core.exceptions import ServiceUnavailable -from google.api_core.exceptions import InternalServerError +from google.api_core.exceptions import ( + Aborted, + DeadlineExceeded, + InternalServerError, + NotFound, + RetryError, + ServiceUnavailable, +) from google.api_core.gapic_v1.method import DEFAULT -from google.api_core.retry import if_exception_type -from google.api_core.retry import Retry +from google.api_core.retry import Retry, if_exception_type from google.cloud._helpers import _to_bytes # type: ignore + +from google.cloud.bigtable import enums from google.cloud.bigtable.backup import Backup -from google.cloud.bigtable.column_family import _gc_rule_from_pb -from google.cloud.bigtable.column_family import ColumnFamily -from google.cloud.bigtable.batcher import MutationsBatcher -from google.cloud.bigtable.batcher import FLUSH_COUNT, MAX_MUTATION_SIZE +from google.cloud.bigtable.batcher import ( + FLUSH_COUNT, + MAX_MUTATION_SIZE, + MutationsBatcher, +) +from google.cloud.bigtable.column_family import ColumnFamily, _gc_rule_from_pb from google.cloud.bigtable.encryption_info import EncryptionInfo from google.cloud.bigtable.policy import Policy -from google.cloud.bigtable.row import AppendRow -from google.cloud.bigtable.row import ConditionalRow -from google.cloud.bigtable.row import DirectRow +from google.cloud.bigtable.row import AppendRow, 
ConditionalRow, DirectRow from google.cloud.bigtable.row_data import ( + DEFAULT_RETRY_READ_ROWS, PartialRowsData, _retriable_internal_server_error, ) -from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS -from google.cloud.bigtable.row_set import RowSet -from google.cloud.bigtable.row_set import RowRange -from google.cloud.bigtable import enums -from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 +from google.cloud.bigtable.row_set import RowRange, RowSet from google.cloud.bigtable_admin_v2 import BaseBigtableTableAdminClient -from google.cloud.bigtable_admin_v2.types import table as admin_messages_v2_pb2 from google.cloud.bigtable_admin_v2.types import ( bigtable_table_admin as table_admin_messages_v2_pb2, ) +from google.cloud.bigtable_admin_v2.types import table as admin_messages_v2_pb2 +from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 # Maximum number of mutations in bulk (MutateRowsRequest message): # (https://cloud.google.com/bigtable/docs/reference/data/rpc/ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py index 2d95b06c849f..faa7a5506811 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin/__init__.py @@ -18,307 +18,145 @@ __version__ = package_version.__version__ -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.client import ( - BigtableInstanceAdminClient, -) from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.async_client import ( BigtableInstanceAdminAsyncClient, ) -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.client import ( - BaseBigtableTableAdminClient, +from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin.client import ( + BigtableInstanceAdminClient, ) from 
google.cloud.bigtable_admin_v2.services.bigtable_table_admin.async_client import ( BaseBigtableTableAdminAsyncClient, ) - -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( - CreateAppProfileRequest, +from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.client import ( + BaseBigtableTableAdminClient, ) from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( + CreateAppProfileRequest, CreateClusterMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateClusterRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateInstanceMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateInstanceRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateLogicalViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateLogicalViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateMaterializedViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( CreateMaterializedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( DeleteAppProfileRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( DeleteClusterRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( DeleteInstanceRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( DeleteLogicalViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( DeleteMaterializedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( GetAppProfileRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( GetClusterRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin 
import ( GetInstanceRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( GetLogicalViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( GetMaterializedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListAppProfilesRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListAppProfilesResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListClustersRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListClustersResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListHotTabletsRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListHotTabletsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListInstancesRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListInstancesResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListLogicalViewsRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListLogicalViewsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListMaterializedViewsRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( ListMaterializedViewsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( PartialUpdateClusterMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( PartialUpdateClusterRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( PartialUpdateInstanceRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateAppProfileMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( 
UpdateAppProfileRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateClusterMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateInstanceMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateLogicalViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateLogicalViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateMaterializedViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_instance_admin import ( UpdateMaterializedViewRequest, ) from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CheckConsistencyRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CheckConsistencyResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupMetadata -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import CopyBackupRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + CopyBackupMetadata, + CopyBackupRequest, CreateAuthorizedViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateAuthorizedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateBackupMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateBackupRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateSchemaBundleMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateSchemaBundleRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateTableFromSnapshotMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( CreateTableFromSnapshotRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import 
CreateTableRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + CreateTableRequest, DataBoostReadLocalWrites, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( DeleteAuthorizedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( DeleteBackupRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( DeleteSchemaBundleRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( DeleteSnapshotRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import DeleteTableRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + DeleteTableRequest, DropRowRangeRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( GenerateConsistencyTokenRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( GenerateConsistencyTokenResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( GetAuthorizedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetBackupRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + GetBackupRequest, GetSchemaBundleRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetSnapshotRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import GetTableRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + GetSnapshotRequest, + GetTableRequest, ListAuthorizedViewsRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( ListAuthorizedViewsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListBackupsRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + ListBackupsRequest, ListBackupsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( 
ListSchemaBundlesRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( ListSchemaBundlesResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( ListSnapshotsRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( ListSnapshotsResponse, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListTablesRequest -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ListTablesResponse -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( + ListTablesRequest, + ListTablesResponse, ModifyColumnFamiliesRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( OptimizeRestoredTableMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( RestoreTableMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( RestoreTableRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( SnapshotTableMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( SnapshotTableRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( StandardReadRemoteWrites, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UndeleteTableMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UndeleteTableRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateAuthorizedViewMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateAuthorizedViewRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateBackupRequest, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateSchemaBundleMetadata, -) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateSchemaBundleRequest, -) -from 
google.cloud.bigtable_admin_v2.types.bigtable_table_admin import ( UpdateTableMetadata, + UpdateTableRequest, +) +from google.cloud.bigtable_admin_v2.types.common import OperationProgress, StorageType +from google.cloud.bigtable_admin_v2.types.instance import ( + AppProfile, + AutoscalingLimits, + AutoscalingTargets, + Cluster, + HotTablet, + Instance, + LogicalView, + MaterializedView, +) +from google.cloud.bigtable_admin_v2.types.table import ( + AuthorizedView, + Backup, + BackupInfo, + ChangeStreamConfig, + ColumnFamily, + EncryptionInfo, + GcRule, + ProtoSchema, + RestoreInfo, + RestoreSourceType, + SchemaBundle, + Snapshot, + Table, + TieredStorageConfig, + TieredStorageRule, ) -from google.cloud.bigtable_admin_v2.types.bigtable_table_admin import UpdateTableRequest -from google.cloud.bigtable_admin_v2.types.common import OperationProgress -from google.cloud.bigtable_admin_v2.types.common import StorageType -from google.cloud.bigtable_admin_v2.types.instance import AppProfile -from google.cloud.bigtable_admin_v2.types.instance import AutoscalingLimits -from google.cloud.bigtable_admin_v2.types.instance import AutoscalingTargets -from google.cloud.bigtable_admin_v2.types.instance import Cluster -from google.cloud.bigtable_admin_v2.types.instance import HotTablet -from google.cloud.bigtable_admin_v2.types.instance import Instance -from google.cloud.bigtable_admin_v2.types.instance import LogicalView -from google.cloud.bigtable_admin_v2.types.instance import MaterializedView -from google.cloud.bigtable_admin_v2.types.table import AuthorizedView -from google.cloud.bigtable_admin_v2.types.table import Backup -from google.cloud.bigtable_admin_v2.types.table import BackupInfo -from google.cloud.bigtable_admin_v2.types.table import ChangeStreamConfig -from google.cloud.bigtable_admin_v2.types.table import ColumnFamily -from google.cloud.bigtable_admin_v2.types.table import EncryptionInfo -from google.cloud.bigtable_admin_v2.types.table import GcRule -from 
google.cloud.bigtable_admin_v2.types.table import ProtoSchema -from google.cloud.bigtable_admin_v2.types.table import RestoreInfo -from google.cloud.bigtable_admin_v2.types.table import SchemaBundle -from google.cloud.bigtable_admin_v2.types.table import Snapshot -from google.cloud.bigtable_admin_v2.types.table import Table -from google.cloud.bigtable_admin_v2.types.table import TieredStorageConfig -from google.cloud.bigtable_admin_v2.types.table import TieredStorageRule -from google.cloud.bigtable_admin_v2.types.table import RestoreSourceType from google.cloud.bigtable_admin_v2.types.types import Type __all__ = ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py index 6a47979fd5ec..1df7a3467dab 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/__init__.py @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.bigtable_admin_v2 import gapic_version as package_version +import sys import google.api_core as api_core -import sys + +from google.cloud.bigtable_admin_v2 import gapic_version as package_version __version__ = package_version.__version__ @@ -27,132 +28,141 @@ # this code path once we drop support for Python 3.7 import importlib_metadata as metadata - -from .services.bigtable_instance_admin import BigtableInstanceAdminClient -from .services.bigtable_instance_admin import BigtableInstanceAdminAsyncClient -from .services.bigtable_table_admin import BaseBigtableTableAdminClient -from .services.bigtable_table_admin import BaseBigtableTableAdminAsyncClient - -from .types.bigtable_instance_admin import CreateAppProfileRequest -from .types.bigtable_instance_admin import CreateClusterMetadata -from .types.bigtable_instance_admin import CreateClusterRequest -from .types.bigtable_instance_admin import CreateInstanceMetadata -from .types.bigtable_instance_admin import CreateInstanceRequest -from .types.bigtable_instance_admin import CreateLogicalViewMetadata -from .types.bigtable_instance_admin import CreateLogicalViewRequest -from .types.bigtable_instance_admin import CreateMaterializedViewMetadata -from .types.bigtable_instance_admin import CreateMaterializedViewRequest -from .types.bigtable_instance_admin import DeleteAppProfileRequest -from .types.bigtable_instance_admin import DeleteClusterRequest -from .types.bigtable_instance_admin import DeleteInstanceRequest -from .types.bigtable_instance_admin import DeleteLogicalViewRequest -from .types.bigtable_instance_admin import DeleteMaterializedViewRequest -from .types.bigtable_instance_admin import GetAppProfileRequest -from .types.bigtable_instance_admin import GetClusterRequest -from .types.bigtable_instance_admin import GetInstanceRequest -from .types.bigtable_instance_admin import GetLogicalViewRequest -from .types.bigtable_instance_admin import GetMaterializedViewRequest -from 
.types.bigtable_instance_admin import ListAppProfilesRequest -from .types.bigtable_instance_admin import ListAppProfilesResponse -from .types.bigtable_instance_admin import ListClustersRequest -from .types.bigtable_instance_admin import ListClustersResponse -from .types.bigtable_instance_admin import ListHotTabletsRequest -from .types.bigtable_instance_admin import ListHotTabletsResponse -from .types.bigtable_instance_admin import ListInstancesRequest -from .types.bigtable_instance_admin import ListInstancesResponse -from .types.bigtable_instance_admin import ListLogicalViewsRequest -from .types.bigtable_instance_admin import ListLogicalViewsResponse -from .types.bigtable_instance_admin import ListMaterializedViewsRequest -from .types.bigtable_instance_admin import ListMaterializedViewsResponse -from .types.bigtable_instance_admin import PartialUpdateClusterMetadata -from .types.bigtable_instance_admin import PartialUpdateClusterRequest -from .types.bigtable_instance_admin import PartialUpdateInstanceRequest -from .types.bigtable_instance_admin import UpdateAppProfileMetadata -from .types.bigtable_instance_admin import UpdateAppProfileRequest -from .types.bigtable_instance_admin import UpdateClusterMetadata -from .types.bigtable_instance_admin import UpdateInstanceMetadata -from .types.bigtable_instance_admin import UpdateLogicalViewMetadata -from .types.bigtable_instance_admin import UpdateLogicalViewRequest -from .types.bigtable_instance_admin import UpdateMaterializedViewMetadata -from .types.bigtable_instance_admin import UpdateMaterializedViewRequest -from .types.bigtable_table_admin import CheckConsistencyRequest -from .types.bigtable_table_admin import CheckConsistencyResponse -from .types.bigtable_table_admin import CopyBackupMetadata -from .types.bigtable_table_admin import CopyBackupRequest -from .types.bigtable_table_admin import CreateAuthorizedViewMetadata -from .types.bigtable_table_admin import CreateAuthorizedViewRequest -from 
.types.bigtable_table_admin import CreateBackupMetadata -from .types.bigtable_table_admin import CreateBackupRequest -from .types.bigtable_table_admin import CreateSchemaBundleMetadata -from .types.bigtable_table_admin import CreateSchemaBundleRequest -from .types.bigtable_table_admin import CreateTableFromSnapshotMetadata -from .types.bigtable_table_admin import CreateTableFromSnapshotRequest -from .types.bigtable_table_admin import CreateTableRequest -from .types.bigtable_table_admin import DataBoostReadLocalWrites -from .types.bigtable_table_admin import DeleteAuthorizedViewRequest -from .types.bigtable_table_admin import DeleteBackupRequest -from .types.bigtable_table_admin import DeleteSchemaBundleRequest -from .types.bigtable_table_admin import DeleteSnapshotRequest -from .types.bigtable_table_admin import DeleteTableRequest -from .types.bigtable_table_admin import DropRowRangeRequest -from .types.bigtable_table_admin import GenerateConsistencyTokenRequest -from .types.bigtable_table_admin import GenerateConsistencyTokenResponse -from .types.bigtable_table_admin import GetAuthorizedViewRequest -from .types.bigtable_table_admin import GetBackupRequest -from .types.bigtable_table_admin import GetSchemaBundleRequest -from .types.bigtable_table_admin import GetSnapshotRequest -from .types.bigtable_table_admin import GetTableRequest -from .types.bigtable_table_admin import ListAuthorizedViewsRequest -from .types.bigtable_table_admin import ListAuthorizedViewsResponse -from .types.bigtable_table_admin import ListBackupsRequest -from .types.bigtable_table_admin import ListBackupsResponse -from .types.bigtable_table_admin import ListSchemaBundlesRequest -from .types.bigtable_table_admin import ListSchemaBundlesResponse -from .types.bigtable_table_admin import ListSnapshotsRequest -from .types.bigtable_table_admin import ListSnapshotsResponse -from .types.bigtable_table_admin import ListTablesRequest -from .types.bigtable_table_admin import ListTablesResponse -from 
.types.bigtable_table_admin import ModifyColumnFamiliesRequest -from .types.bigtable_table_admin import OptimizeRestoredTableMetadata -from .types.bigtable_table_admin import RestoreTableMetadata -from .types.bigtable_table_admin import RestoreTableRequest -from .types.bigtable_table_admin import SnapshotTableMetadata -from .types.bigtable_table_admin import SnapshotTableRequest -from .types.bigtable_table_admin import StandardReadRemoteWrites -from .types.bigtable_table_admin import UndeleteTableMetadata -from .types.bigtable_table_admin import UndeleteTableRequest -from .types.bigtable_table_admin import UpdateAuthorizedViewMetadata -from .types.bigtable_table_admin import UpdateAuthorizedViewRequest -from .types.bigtable_table_admin import UpdateBackupRequest -from .types.bigtable_table_admin import UpdateSchemaBundleMetadata -from .types.bigtable_table_admin import UpdateSchemaBundleRequest -from .types.bigtable_table_admin import UpdateTableMetadata -from .types.bigtable_table_admin import UpdateTableRequest -from .types.common import OperationProgress -from .types.common import StorageType -from .types.instance import AppProfile -from .types.instance import AutoscalingLimits -from .types.instance import AutoscalingTargets -from .types.instance import Cluster -from .types.instance import HotTablet -from .types.instance import Instance -from .types.instance import LogicalView -from .types.instance import MaterializedView -from .types.table import AuthorizedView -from .types.table import Backup -from .types.table import BackupInfo -from .types.table import ChangeStreamConfig -from .types.table import ColumnFamily -from .types.table import EncryptionInfo -from .types.table import GcRule -from .types.table import ProtoSchema -from .types.table import RestoreInfo -from .types.table import SchemaBundle -from .types.table import Snapshot -from .types.table import Table -from .types.table import TieredStorageConfig -from .types.table import TieredStorageRule -from 
.types.table import RestoreSourceType +from .services.bigtable_instance_admin import ( + BigtableInstanceAdminAsyncClient, + BigtableInstanceAdminClient, +) +from .services.bigtable_table_admin import ( + BaseBigtableTableAdminAsyncClient, + BaseBigtableTableAdminClient, +) +from .types.bigtable_instance_admin import ( + CreateAppProfileRequest, + CreateClusterMetadata, + CreateClusterRequest, + CreateInstanceMetadata, + CreateInstanceRequest, + CreateLogicalViewMetadata, + CreateLogicalViewRequest, + CreateMaterializedViewMetadata, + CreateMaterializedViewRequest, + DeleteAppProfileRequest, + DeleteClusterRequest, + DeleteInstanceRequest, + DeleteLogicalViewRequest, + DeleteMaterializedViewRequest, + GetAppProfileRequest, + GetClusterRequest, + GetInstanceRequest, + GetLogicalViewRequest, + GetMaterializedViewRequest, + ListAppProfilesRequest, + ListAppProfilesResponse, + ListClustersRequest, + ListClustersResponse, + ListHotTabletsRequest, + ListHotTabletsResponse, + ListInstancesRequest, + ListInstancesResponse, + ListLogicalViewsRequest, + ListLogicalViewsResponse, + ListMaterializedViewsRequest, + ListMaterializedViewsResponse, + PartialUpdateClusterMetadata, + PartialUpdateClusterRequest, + PartialUpdateInstanceRequest, + UpdateAppProfileMetadata, + UpdateAppProfileRequest, + UpdateClusterMetadata, + UpdateInstanceMetadata, + UpdateLogicalViewMetadata, + UpdateLogicalViewRequest, + UpdateMaterializedViewMetadata, + UpdateMaterializedViewRequest, +) +from .types.bigtable_table_admin import ( + CheckConsistencyRequest, + CheckConsistencyResponse, + CopyBackupMetadata, + CopyBackupRequest, + CreateAuthorizedViewMetadata, + CreateAuthorizedViewRequest, + CreateBackupMetadata, + CreateBackupRequest, + CreateSchemaBundleMetadata, + CreateSchemaBundleRequest, + CreateTableFromSnapshotMetadata, + CreateTableFromSnapshotRequest, + CreateTableRequest, + DataBoostReadLocalWrites, + DeleteAuthorizedViewRequest, + DeleteBackupRequest, + DeleteSchemaBundleRequest, + 
DeleteSnapshotRequest, + DeleteTableRequest, + DropRowRangeRequest, + GenerateConsistencyTokenRequest, + GenerateConsistencyTokenResponse, + GetAuthorizedViewRequest, + GetBackupRequest, + GetSchemaBundleRequest, + GetSnapshotRequest, + GetTableRequest, + ListAuthorizedViewsRequest, + ListAuthorizedViewsResponse, + ListBackupsRequest, + ListBackupsResponse, + ListSchemaBundlesRequest, + ListSchemaBundlesResponse, + ListSnapshotsRequest, + ListSnapshotsResponse, + ListTablesRequest, + ListTablesResponse, + ModifyColumnFamiliesRequest, + OptimizeRestoredTableMetadata, + RestoreTableMetadata, + RestoreTableRequest, + SnapshotTableMetadata, + SnapshotTableRequest, + StandardReadRemoteWrites, + UndeleteTableMetadata, + UndeleteTableRequest, + UpdateAuthorizedViewMetadata, + UpdateAuthorizedViewRequest, + UpdateBackupRequest, + UpdateSchemaBundleMetadata, + UpdateSchemaBundleRequest, + UpdateTableMetadata, + UpdateTableRequest, +) +from .types.common import OperationProgress, StorageType +from .types.instance import ( + AppProfile, + AutoscalingLimits, + AutoscalingTargets, + Cluster, + HotTablet, + Instance, + LogicalView, + MaterializedView, +) +from .types.table import ( + AuthorizedView, + Backup, + BackupInfo, + ChangeStreamConfig, + ColumnFamily, + EncryptionInfo, + GcRule, + ProtoSchema, + RestoreInfo, + RestoreSourceType, + SchemaBundle, + Snapshot, + Table, + TieredStorageConfig, + TieredStorageRule, +) from .types.types import Type if hasattr(api_core, "check_python_version") and hasattr( @@ -164,8 +174,8 @@ # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. 
try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.bigtable_admin_v2" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py index f66c7f8dd885..a3ad5728cd0d 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/__init__.py @@ -29,17 +29,16 @@ # currently implemented as either types under overlay/types or in methods in an overwritten # client class under overlay/services. +from .services.bigtable_table_admin import ( + BigtableTableAdminAsyncClient, + BigtableTableAdminClient, +) from .types import ( AsyncRestoreTableOperation, RestoreTableOperation, WaitForConsistencyRequest, ) -from .services.bigtable_table_admin import ( - BigtableTableAdminAsyncClient, - BigtableTableAdminClient, -) - __all__ = ( "AsyncRestoreTableOperation", "RestoreTableOperation", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py index ee8e5757d23a..bd642eb15661 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/async_client.py @@ -29,8 +29,8 @@ import copy import functools - from typing import Callable, Optional, Sequence, Tuple, Union + from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -42,22 +42,19 @@ from google.api_core import client_options as client_options_lib from google.auth import credentials as ga_credentials # type: ignore -from 
google.cloud.bigtable_admin_v2.types import bigtable_table_admin - +from google.cloud.bigtable.gapic_version import __version__ as bigtable_version +from google.cloud.bigtable_admin_v2.overlay.types import ( + async_consistency, + async_restore_table, + wait_for_consistency_request, +) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( async_client as base_client, ) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.base import ( BigtableTableAdminTransport, ) -from google.cloud.bigtable_admin_v2.overlay.types import ( - async_consistency, - async_restore_table, - wait_for_consistency_request, -) - -from google.cloud.bigtable.gapic_version import __version__ as bigtable_version - +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin DEFAULT_CLIENT_INFO = copy.copy(base_client.DEFAULT_CLIENT_INFO) DEFAULT_CLIENT_INFO.client_library_version = f"{bigtable_version}-admin-overlay-async" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py index 1b6770b10195..56ec13b157d5 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/services/bigtable_table_admin/client.py @@ -29,8 +29,8 @@ import copy import functools - from typing import Callable, Optional, Sequence, Tuple, Union + from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -42,22 +42,19 @@ from google.api_core import client_options as client_options_lib from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin - +from google.cloud.bigtable.gapic_version import __version__ as bigtable_version +from 
google.cloud.bigtable_admin_v2.overlay.types import ( + consistency, + restore_table, + wait_for_consistency_request, +) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( client as base_client, ) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin.transports.base import ( BigtableTableAdminTransport, ) -from google.cloud.bigtable_admin_v2.overlay.types import ( - consistency, - restore_table, - wait_for_consistency_request, -) - -from google.cloud.bigtable.gapic_version import __version__ as bigtable_version - +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin DEFAULT_CLIENT_INFO = copy.copy(base_client.DEFAULT_CLIENT_INFO) DEFAULT_CLIENT_INFO.client_library_version = f"{bigtable_version}-admin-overlay" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py index 16b032ac4743..165809742077 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/__init__.py @@ -12,17 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from .async_restore_table import ( - AsyncRestoreTableOperation, -) - -from .restore_table import ( - RestoreTableOperation, -) - -from .wait_for_consistency_request import ( - WaitForConsistencyRequest, -) +from .async_restore_table import AsyncRestoreTableOperation +from .restore_table import RestoreTableOperation +from .wait_for_consistency_request import WaitForConsistencyRequest __all__ = ( "AsyncRestoreTableOperation", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py index 0703940d5138..0b5f94eeebdc 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_consistency.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Awaitable, Union, Callable +from typing import Awaitable, Callable, Union -from google.api_core.future import async_future from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.future import async_future + from google.cloud.bigtable_admin_v2.types import bigtable_table_admin try: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py index 9edfb4963cd3..ee834713b5d6 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/async_restore_table.py @@ -14,8 +14,7 @@ from typing import Optional -from google.api_core import exceptions -from google.api_core import operation_async +from google.api_core import exceptions, operation_async from google.protobuf import empty_pb2 from google.cloud.bigtable_admin_v2.types import OptimizeRestoredTableMetadata diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py index 63a110975442..2062950593be 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/consistency.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Union, Callable +from typing import Callable, Union -from google.api_core.future import polling from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.future import polling + from google.cloud.bigtable_admin_v2.types import bigtable_table_admin try: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py index 84c9c5d91644..e8201d0c0a62 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/overlay/types/restore_table.py @@ -14,8 +14,7 @@ from typing import Optional -from google.api_core import exceptions -from google.api_core import operation +from google.api_core import exceptions, operation from google.protobuf import empty_pb2 from google.cloud.bigtable_admin_v2.types import OptimizeRestoredTableMetadata diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py index 20ac9e4fc5f6..eee3c02dcb5b 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import BigtableInstanceAdminClient from .async_client import BigtableInstanceAdminAsyncClient +from .client import BigtableInstanceAdminClient __all__ = ( "BigtableInstanceAdminClient", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py index 632496543912..c5d173e6f169 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/async_client.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import logging as std_logging from collections import OrderedDict +import logging as std_logging import re from typing import ( - Dict, Callable, + Dict, Mapping, MutableMapping, MutableSequence, @@ -29,16 +29,15 @@ Union, ) -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_admin_v2 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -47,18 +46,19 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers -from google.cloud.bigtable_admin_v2.types import 
bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import common -from google.cloud.bigtable_admin_v2.types import instance -from google.cloud.bigtable_admin_v2.types import instance as gba_instance from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport + +from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, common +from google.cloud.bigtable_admin_v2.types import instance +from google.cloud.bigtable_admin_v2.types import instance as gba_instance + from .client import BigtableInstanceAdminClient +from .transports.base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport +from .transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py index 9d64108bb4a1..a07be37fd8bb 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/client.py @@ -20,8 +20,8 @@ import os import re from typing import ( - Dict, Callable, + Dict, Mapping, MutableMapping, MutableSequence, @@ -34,19 +34,19 @@ ) import warnings -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core 
import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_admin_v2 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -63,16 +63,17 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import common -from google.cloud.bigtable_admin_v2.types import instance -from google.cloud.bigtable_admin_v2.types import instance as gba_instance from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO + +from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, common +from google.cloud.bigtable_admin_v2.types import instance +from google.cloud.bigtable_admin_v2.types import instance as gba_instance + +from .transports.base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport from .transports.grpc import BigtableInstanceAdminGrpcTransport from 
.transports.grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport from .transports.rest import BigtableInstanceAdminRestTransport diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py index ce5b67b27324..a64e3057c98e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/pagers.py @@ -13,21 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, Union, ) +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[ @@ -37,8 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import instance +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance class ListAppProfilesPager: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py index 021458f35945..aa23f481c2ab 100644 --- 
a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/__init__.py @@ -19,9 +19,10 @@ from .base import BigtableInstanceAdminTransport from .grpc import BigtableInstanceAdminGrpcTransport from .grpc_asyncio import BigtableInstanceAdminGrpcAsyncIOTransport -from .rest import BigtableInstanceAdminRestTransport -from .rest import BigtableInstanceAdminRestInterceptor - +from .rest import ( + BigtableInstanceAdminRestInterceptor, + BigtableInstanceAdminRestTransport, +) # Compile a registry of transports. _transport_registry = ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py index 3a05dd6631ca..0b6640cb2c0e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/base.py @@ -16,25 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from 
google.cloud.bigtable_admin_v2.types import instance from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf from google.protobuf import empty_pb2 # type: ignore +from google.cloud.bigtable_admin_v2 import gapic_version as package_version +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py index d5d5cf1e53b9..12a507306de1 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc.py @@ -16,28 +16,25 @@ import json import logging as std_logging import pickle -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - 
import grpc # type: ignore import proto # type: ignore -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import instance -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance + +from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py index 7ce7627649fe..f13934806328 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/grpc_asyncio.py @@ -15,32 +15,29 @@ # import inspect import json -import pickle import logging as std_logging -import warnings +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from 
google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import instance -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance + +from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport from .grpc import BigtableInstanceAdminGrpcTransport try: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py index 9879c4c45360..fd16775814b4 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest.py @@ -13,37 +13,29 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version import dataclasses +import json # type: ignore +import logging from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings - -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import instance +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance -from .rest_base import _BaseBigtableInstanceAdminRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBigtableInstanceAdminRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] diff --git 
a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py index 9855756b8ee3..aa4143994da8 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_instance_admin/transports/rest_base.py @@ -14,22 +14,19 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import BigtableInstanceAdminTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import instance +from google.api_core import gapic_v1, path_template from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, instance + +from .base import DEFAULT_CLIENT_INFO, BigtableInstanceAdminTransport class _BaseBigtableInstanceAdminRestTransport(BigtableInstanceAdminTransport): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py index c5e8544d6423..c709fd07b84c 100644 --- 
a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import BaseBigtableTableAdminClient from .async_client import BaseBigtableTableAdminAsyncClient +from .client import BaseBigtableTableAdminClient __all__ = ( "BaseBigtableTableAdminClient", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py index 7f772c87c77f..0cb3c8479575 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/async_client.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging as std_logging from collections import OrderedDict +import logging as std_logging import re from typing import ( - Dict, Callable, + Dict, Mapping, MutableMapping, MutableSequence, @@ -29,16 +29,15 @@ Union, ) -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_admin_v2 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -47,18 +46,20 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table from google.cloud.bigtable_admin_v2.types import types -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport + from .client import 
BaseBigtableTableAdminClient +from .transports.base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport +from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py index ce251db7d714..39ee60569fd7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/client.py @@ -20,8 +20,8 @@ import os import re from typing import ( - Dict, Callable, + Dict, Mapping, MutableMapping, MutableSequence, @@ -34,19 +34,19 @@ ) import warnings -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_admin_v2 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -63,16 +63,18 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 
import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table from google.cloud.bigtable_admin_v2.types import types -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO + +from .transports.base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport from .transports.grpc import BigtableTableAdminGrpcTransport from .transports.grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport from .transports.rest import BigtableTableAdminRestTransport diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py index e6d83ba63a0e..ee01f93f76a7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/pagers.py @@ -13,21 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, Union, ) +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[ @@ -37,8 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin -from google.cloud.bigtable_admin_v2.types import table +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table class ListTablesPager: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py index e7621f781d0b..4d381c2292d5 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/__init__.py @@ -19,9 +19,7 @@ from .base import BigtableTableAdminTransport from .grpc import BigtableTableAdminGrpcTransport from .grpc_asyncio import BigtableTableAdminGrpcAsyncIOTransport -from .rest import BigtableTableAdminRestTransport -from .rest import BigtableTableAdminRestInterceptor - +from .rest import BigtableTableAdminRestInterceptor, BigtableTableAdminRestTransport # Compile a registry of transports. 
_transport_registry = ( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py index 8ad08df3ffa0..3131d29bb3ab 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/base.py @@ -16,25 +16,23 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.bigtable_admin_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.cloud.bigtable_admin_v2 import gapic_version as package_version from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=package_version.__version__ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py index f8d1058c8c32..b7c1bfde1cc7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc.py @@ -16,29 +16,27 @@ import json import logging as std_logging import pickle -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore import proto # type: ignore from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, 
BigtableTableAdminTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py index 5017f17d0575..f4e4e567b495 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging -import warnings +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.iam.v1 import iam_policy_pb2 # type: 
ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport from .grpc import BigtableTableAdminGrpcTransport try: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py index 6c3815f79437..92db84135248 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest.py @@ -13,38 +13,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore import google.protobuf - +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseBigtableTableAdminRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBigtableTableAdminRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] diff --git 
a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py index ef6c2374d2a2..cf51653a2cd2 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/services/bigtable_table_admin/transports/rest_base.py @@ -14,23 +14,21 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import BigtableTableAdminTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.bigtable_admin_v2.types import bigtable_table_admin from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, BigtableTableAdminTransport class _BaseBigtableTableAdminRestTransport(BigtableTableAdminTransport): diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py index d2036c7a3cd0..8369a44fd5c6 100644 --- 
a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/__init__.py @@ -112,10 +112,7 @@ UpdateTableMetadata, UpdateTableRequest, ) -from .common import ( - OperationProgress, - StorageType, -) +from .common import OperationProgress, StorageType from .instance import ( AppProfile, AutoscalingLimits, @@ -136,16 +133,14 @@ GcRule, ProtoSchema, RestoreInfo, + RestoreSourceType, SchemaBundle, Snapshot, Table, TieredStorageConfig, TieredStorageRule, - RestoreSourceType, -) -from .types import ( - Type, ) +from .types import Type __all__ = ( "CreateAppProfileRequest", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py index 4197ed0b7424..ff56005992a3 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_instance_admin.py @@ -17,12 +17,11 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.bigtable_admin_v2.types import instance as gba_instance from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.bigtable_admin_v2.types import instance as gba_instance __protobuf__ = proto.module( package="google.bigtable.admin.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py index 69de07a2ab8e..abce37bf9cdc 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py +++ 
b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/bigtable_table_admin.py @@ -17,14 +17,13 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.bigtable_admin_v2.types import common -from google.cloud.bigtable_admin_v2.types import table as gba_table from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore +from google.cloud.bigtable_admin_v2.types import common +from google.cloud.bigtable_admin_v2.types import table as gba_table __protobuf__ = proto.module( package="google.bigtable.admin.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py index 7b05e5ff5a2e..b08f1619875e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/common.py @@ -17,10 +17,8 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.bigtable.admin.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py index f07414d56957..9eb39da223d5 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/instance.py @@ -17,11 +17,10 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.bigtable_admin_v2.types import common -from google.protobuf import timestamp_pb2 # type: 
ignore - __protobuf__ = proto.module( package="google.bigtable.admin.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py index c4f23d5fa7bc..0555674d84f9 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/table.py @@ -17,14 +17,13 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.bigtable_admin_v2.types import types -from google.cloud.bigtable_admin_v2.utils import oneof_message from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore +import proto # type: ignore +from google.cloud.bigtable_admin_v2.types import types +from google.cloud.bigtable_admin_v2.utils import oneof_message __protobuf__ = proto.module( package="google.bigtable.admin.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py index 4f56429dabff..0b89a8e1d22e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/types/types.py @@ -19,7 +19,6 @@ import proto # type: ignore - __protobuf__ = proto.module( package="google.bigtable.admin.v2", manifest={ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py index e110d8fa6cf1..9c5f08615c98 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_admin_v2/utils/oneof_message.py @@ -15,6 +15,7 @@ # # import collections.abc + 
import proto diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py index ec552a85dbad..c3cc1a1fb440 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/__init__.py @@ -13,10 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.bigtable_v2 import gapic_version as package_version +import sys import google.api_core as api_core -import sys + +from google.cloud.bigtable_v2 import gapic_version as package_version __version__ = package_version.__version__ @@ -27,64 +28,67 @@ # this code path once we drop support for Python 3.7 import importlib_metadata as metadata - -from .services.bigtable import BigtableClient -from .services.bigtable import BigtableAsyncClient - -from .types.bigtable import CheckAndMutateRowRequest -from .types.bigtable import CheckAndMutateRowResponse -from .types.bigtable import ExecuteQueryRequest -from .types.bigtable import ExecuteQueryResponse -from .types.bigtable import GenerateInitialChangeStreamPartitionsRequest -from .types.bigtable import GenerateInitialChangeStreamPartitionsResponse -from .types.bigtable import MutateRowRequest -from .types.bigtable import MutateRowResponse -from .types.bigtable import MutateRowsRequest -from .types.bigtable import MutateRowsResponse -from .types.bigtable import PingAndWarmRequest -from .types.bigtable import PingAndWarmResponse -from .types.bigtable import PrepareQueryRequest -from .types.bigtable import PrepareQueryResponse -from .types.bigtable import RateLimitInfo -from .types.bigtable import ReadChangeStreamRequest -from .types.bigtable import ReadChangeStreamResponse -from .types.bigtable import ReadModifyWriteRowRequest -from .types.bigtable import ReadModifyWriteRowResponse -from .types.bigtable import ReadRowsRequest -from 
.types.bigtable import ReadRowsResponse -from .types.bigtable import SampleRowKeysRequest -from .types.bigtable import SampleRowKeysResponse -from .types.data import ArrayValue -from .types.data import Cell -from .types.data import Column -from .types.data import ColumnMetadata -from .types.data import ColumnRange -from .types.data import Family -from .types.data import Idempotency -from .types.data import Mutation -from .types.data import PartialResultSet -from .types.data import ProtoFormat -from .types.data import ProtoRows -from .types.data import ProtoRowsBatch -from .types.data import ProtoSchema -from .types.data import ReadModifyWriteRule -from .types.data import ResultSetMetadata -from .types.data import Row -from .types.data import RowFilter -from .types.data import RowRange -from .types.data import RowSet -from .types.data import StreamContinuationToken -from .types.data import StreamContinuationTokens -from .types.data import StreamPartition -from .types.data import TimestampRange -from .types.data import Value -from .types.data import ValueRange +from .services.bigtable import BigtableAsyncClient, BigtableClient +from .types.bigtable import ( + CheckAndMutateRowRequest, + CheckAndMutateRowResponse, + ExecuteQueryRequest, + ExecuteQueryResponse, + GenerateInitialChangeStreamPartitionsRequest, + GenerateInitialChangeStreamPartitionsResponse, + MutateRowRequest, + MutateRowResponse, + MutateRowsRequest, + MutateRowsResponse, + PingAndWarmRequest, + PingAndWarmResponse, + PrepareQueryRequest, + PrepareQueryResponse, + RateLimitInfo, + ReadChangeStreamRequest, + ReadChangeStreamResponse, + ReadModifyWriteRowRequest, + ReadModifyWriteRowResponse, + ReadRowsRequest, + ReadRowsResponse, + SampleRowKeysRequest, + SampleRowKeysResponse, +) +from .types.data import ( + ArrayValue, + Cell, + Column, + ColumnMetadata, + ColumnRange, + Family, + Idempotency, + Mutation, + PartialResultSet, + ProtoFormat, + ProtoRows, + ProtoRowsBatch, + ProtoSchema, + 
ReadModifyWriteRule, + ResultSetMetadata, + Row, + RowFilter, + RowRange, + RowSet, + StreamContinuationToken, + StreamContinuationTokens, + StreamPartition, + TimestampRange, + Value, + ValueRange, +) from .types.feature_flags import FeatureFlags from .types.peer_info import PeerInfo -from .types.request_stats import FullReadStatsView -from .types.request_stats import ReadIterationStats -from .types.request_stats import RequestLatencyStats -from .types.request_stats import RequestStats +from .types.request_stats import ( + FullReadStatsView, + ReadIterationStats, + RequestLatencyStats, + RequestStats, +) from .types.response_params import ResponseParams from .types.types import Type @@ -97,8 +101,8 @@ # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.bigtable_v2" diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py index c74141156324..d24937f6abc5 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import BigtableClient from .async_client import BigtableAsyncClient +from .client import BigtableClient __all__ = ( "BigtableClient", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py index 0a9442287e22..aeee8fb53bd1 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/async_client.py @@ -13,47 +13,46 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import logging as std_logging from collections import OrderedDict +import logging as std_logging import re from typing import ( - Dict, + AsyncIterable, + Awaitable, Callable, + Dict, Mapping, MutableMapping, MutableSequence, Optional, - AsyncIterable, - Awaitable, Sequence, Tuple, Type, Union, ) -from google.cloud.bigtable_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_v2 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.bigtable_v2.types import bigtable -from google.cloud.bigtable_v2.types import data -from google.cloud.bigtable_v2.types import request_stats from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base 
import BigtableTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport + +from google.cloud.bigtable_v2.types import bigtable, data, request_stats + from .client import BigtableClient +from .transports.base import DEFAULT_CLIENT_INFO, BigtableTransport +from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py index 5eb6ba894cba..c87439cb9dd6 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/client.py @@ -20,13 +20,13 @@ import os import re from typing import ( - Dict, Callable, + Dict, + Iterable, Mapping, MutableMapping, MutableSequence, Optional, - Iterable, Sequence, Tuple, Type, @@ -35,19 +35,19 @@ ) import warnings -from google.cloud.bigtable_v2 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_v2 import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -62,11 +62,11 @@ _LOGGER = 
std_logging.getLogger(__name__) -from google.cloud.bigtable_v2.types import bigtable -from google.cloud.bigtable_v2.types import data -from google.cloud.bigtable_v2.types import request_stats from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BigtableTransport, DEFAULT_CLIENT_INFO + +from google.cloud.bigtable_v2.types import bigtable, data, request_stats + +from .transports.base import DEFAULT_CLIENT_INFO, BigtableTransport from .transports.grpc import BigtableGrpcTransport from .transports.grpc_asyncio import BigtableGrpcAsyncIOTransport from .transports.rest import BigtableRestTransport diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py index b35e85534182..c8cf1876eefd 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/__init__.py @@ -19,9 +19,7 @@ from .base import BigtableTransport from .grpc import BigtableGrpcTransport from .grpc_asyncio import BigtableGrpcAsyncIOTransport -from .rest import BigtableRestTransport -from .rest import BigtableRestInterceptor - +from .rest import BigtableRestInterceptor, BigtableRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[BigtableTransport]] diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py index f08bca73ede0..cb44399543e7 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/base.py @@ -16,17 +16,16 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.bigtable_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf +from google.cloud.bigtable_v2 import gapic_version as package_version from google.cloud.bigtable_v2.types import bigtable DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py index 8ddbf15a20c0..fec66e0e64d1 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc.py @@ -16,22 +16,21 @@ import json import logging as std_logging import pickle -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, 
grpc_helpers import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore import proto # type: ignore from google.cloud.bigtable_v2.types import bigtable -from .base import BigtableTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, BigtableTransport try: from google.api_core import client_logging # type: ignore diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py index 3e6b70832307..9dc0d89d05d2 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/grpc_asyncio.py @@ -15,26 +15,25 @@ # import inspect import json -import pickle import logging as std_logging -import warnings +import pickle from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message - import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore +import proto # type: ignore from google.cloud.bigtable_v2.types import bigtable -from .base import BigtableTransport, DEFAULT_CLIENT_INFO + +from .base import 
DEFAULT_CLIENT_INFO, BigtableTransport from .grpc import BigtableGrpcTransport try: diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py index f0a761a360c3..d7c925e9f6ce 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest.py @@ -13,31 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import logging +import dataclasses import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore import google.protobuf - from google.protobuf import json_format - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.bigtable_v2.types import bigtable - -from .rest_base import _BaseBigtableRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBigtableRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] diff --git 
a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py index 5eab0ded45e2..429dcc9604b8 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/services/bigtable/transports/rest_base.py @@ -14,18 +14,16 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import BigtableTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format from google.cloud.bigtable_v2.types import bigtable +from .base import DEFAULT_CLIENT_INFO, BigtableTransport + class _BaseBigtableRestTransport(BigtableTransport): """Base REST backend transport for Bigtable. 
diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/__init__.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/__init__.py index b13c076a2cf6..65cc5dd98e1e 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/__init__.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/__init__.py @@ -65,24 +65,16 @@ Value, ValueRange, ) -from .feature_flags import ( - FeatureFlags, -) -from .peer_info import ( - PeerInfo, -) +from .feature_flags import FeatureFlags +from .peer_info import PeerInfo from .request_stats import ( FullReadStatsView, ReadIterationStats, RequestLatencyStats, RequestStats, ) -from .response_params import ( - ResponseParams, -) -from .types import ( - Type, -) +from .response_params import ResponseParams +from .types import Type __all__ = ( "CheckAndMutateRowRequest", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py index 19abba67b7d6..a8a7ad6d5f20 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/bigtable.py @@ -17,16 +17,15 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.bigtable_v2.types import data -from google.cloud.bigtable_v2.types import request_stats as gb_request_stats -from google.cloud.bigtable_v2.types import types from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore +import proto # type: ignore +from google.cloud.bigtable_v2.types import data +from google.cloud.bigtable_v2.types import request_stats as gb_request_stats +from google.cloud.bigtable_v2.types import types __protobuf__ = proto.module( package="google.bigtable.v2", diff 
--git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py index 12ac8b2b1cbb..bcec249c9c09 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/data.py @@ -17,12 +17,11 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.bigtable_v2.types import types from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore +import proto # type: ignore +from google.cloud.bigtable_v2.types import types __protobuf__ = proto.module( package="google.bigtable.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py index 2c8ea8732746..b7cbaac422c2 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/feature_flags.py @@ -19,7 +19,6 @@ import proto # type: ignore - __protobuf__ = proto.module( package="google.bigtable.v2", manifest={ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py index b3f1203cc9e4..93e9fb2f32e0 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/peer_info.py @@ -19,7 +19,6 @@ import proto # type: ignore - __protobuf__ = proto.module( package="google.bigtable.v2", manifest={ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py index 540e6548d052..a8a225dd94dd 100644 --- 
a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/request_stats.py @@ -17,10 +17,8 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import duration_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.bigtable.v2", diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py index cc6384ab3465..51b87712ff43 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/response_params.py @@ -19,7 +19,6 @@ import proto # type: ignore - __protobuf__ = proto.module( package="google.bigtable.v2", manifest={ diff --git a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py index 0b4ddb57a6f5..f9344f293d42 100644 --- a/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py +++ b/packages/google-cloud-bigtable/google/cloud/bigtable_v2/types/types.py @@ -19,7 +19,6 @@ import proto # type: ignore - __protobuf__ = proto.module( package="google.bigtable.v2", manifest={ diff --git a/packages/google-cloud-bigtable/noxfile.py b/packages/google-cloud-bigtable/noxfile.py index 72ab3f2db08b..fa15d57cac56 100644 --- a/packages/google-cloud-bigtable/noxfile.py +++ b/packages/google-cloud-bigtable/noxfile.py @@ -271,8 +271,8 @@ def install_systemtest_dependencies(session, *constraints): @nox.session(python=DEFAULT_PYTHON_VERSION) def system_emulated(session): - import subprocess import signal + import subprocess try: subprocess.call(["gcloud", "--version"]) diff --git a/packages/google-cloud-bigtable/scripts/conformance.sh 
b/packages/google-cloud-bigtable/scripts/conformance.sh deleted file mode 100755 index fd585142ec27..000000000000 --- a/packages/google-cloud-bigtable/scripts/conformance.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash - -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -## cd to the parent directory, i.e. the root of the git repo -cd $(dirname $0)/.. - -# Build and start the proxy in a separate process -pushd test_proxy -nohup python test_proxy.py --port $PROXY_PORT --client_type=$CLIENT_TYPE & -proxyPID=$! -popd - -# Kill proxy on exit -function cleanup() { - echo "Cleanup testbench"; - kill $proxyPID -} -trap cleanup EXIT - -# Run the conformance test -echo "running tests with args: $TEST_ARGS" -pushd cloud-bigtable-clients-test/tests -eval "go test -v -proxy_addr=:$PROXY_PORT $TEST_ARGS" -RETURN_CODE=$? -popd - -echo "exiting with ${RETURN_CODE}" -exit ${RETURN_CODE} diff --git a/packages/google-cloud-bigtable/setup.py b/packages/google-cloud-bigtable/setup.py index 7c005aa0b2bc..1adac7449e79 100644 --- a/packages/google-cloud-bigtable/setup.py +++ b/packages/google-cloud-bigtable/setup.py @@ -21,7 +21,6 @@ import setuptools - package_root = os.path.abspath(os.path.dirname(__file__)) # Package metadata. 
diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py index 66baef3f4d7a..c3698e3ae3d2 100644 --- a/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py +++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/conftest.py @@ -1,9 +1,8 @@ -import google.auth - import os -import pytest import uuid +import google.auth +import pytest INSTANCE_PREFIX = "admin-overlay-instance" BACKUP_PREFIX = "admin-overlay-backup" diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py index aa412569edd8..2ac5fbf287b6 100644 --- a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py +++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_async.py @@ -12,35 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from datetime import datetime, timedelta +import os from typing import Tuple +from google.cloud.environment_vars import BIGTABLE_EMULATOR +import pytest + from google.cloud import bigtable_admin_v2 as admin_v2 -from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data import mutations, read_rows_query -from google.cloud.environment_vars import BIGTABLE_EMULATOR +from google.cloud.bigtable.data._cross_sync import CrossSync from .conftest import ( - INSTANCE_PREFIX, BACKUP_PREFIX, - ROW_PREFIX, DEFAULT_CLUSTER_LOCATIONS, + INITIAL_CELL_VALUE, + INSTANCE_PREFIX, + NEW_CELL_VALUE, + NUM_ROWS, REPLICATION_CLUSTER_LOCATIONS, - TEST_TABLE_NAME, + ROW_PREFIX, TEST_BACKUP_TABLE_NAME, TEST_COLUMMN_FAMILY_NAME, TEST_COLUMN_NAME, - NUM_ROWS, - INITIAL_CELL_VALUE, - NEW_CELL_VALUE, + TEST_TABLE_NAME, generate_unique_suffix, ) -from datetime import datetime, timedelta - -import pytest -import os - - if CrossSync.is_async: from google.api_core import operation_async as api_core_operation else: diff --git a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py index 4fde3571fa7e..34a991b00084 100644 --- a/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py +++ b/packages/google-cloud-bigtable/tests/system/admin_overlay/test_system_autogen.py @@ -15,30 +15,33 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
+from datetime import datetime, timedelta +import os from typing import Tuple + +from google.api_core import operation as api_core_operation +from google.cloud.environment_vars import BIGTABLE_EMULATOR +import pytest + from google.cloud import bigtable_admin_v2 as admin_v2 -from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data import mutations, read_rows_query -from google.cloud.environment_vars import BIGTABLE_EMULATOR +from google.cloud.bigtable.data._cross_sync import CrossSync + from .conftest import ( - INSTANCE_PREFIX, BACKUP_PREFIX, - ROW_PREFIX, DEFAULT_CLUSTER_LOCATIONS, + INITIAL_CELL_VALUE, + INSTANCE_PREFIX, + NEW_CELL_VALUE, + NUM_ROWS, REPLICATION_CLUSTER_LOCATIONS, - TEST_TABLE_NAME, + ROW_PREFIX, TEST_BACKUP_TABLE_NAME, TEST_COLUMMN_FAMILY_NAME, TEST_COLUMN_NAME, - NUM_ROWS, - INITIAL_CELL_VALUE, - NEW_CELL_VALUE, + TEST_TABLE_NAME, generate_unique_suffix, ) -from datetime import datetime, timedelta -import pytest -import os -from google.api_core import operation as api_core_operation if os.getenv(BIGTABLE_EMULATOR): pytest.skip( diff --git a/packages/google-cloud-bigtable/tests/system/conftest.py b/packages/google-cloud-bigtable/tests/system/conftest.py index 8c0eb30b1565..288785f46f87 100644 --- a/packages/google-cloud-bigtable/tests/system/conftest.py +++ b/packages/google-cloud-bigtable/tests/system/conftest.py @@ -14,11 +14,11 @@ """ Import pytest fixtures for setting up table for data client system tests """ -import sys +import asyncio import os +import sys import pytest -import asyncio script_path = os.path.dirname(os.path.realpath(__file__)) sys.path.append(script_path) diff --git a/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py b/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py index 86911b1631ea..1130ff616ad7 100644 --- a/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py +++ 
b/packages/google-cloud-bigtable/tests/system/cross_sync/test_cross_sync_e2e.py @@ -1,6 +1,7 @@ import ast -import sys import os +import sys + import black import pytest import yaml @@ -10,12 +11,12 @@ cross_sync_path = os.path.join(test_dir_name, "..", "..", "..", ".cross_sync") sys.path.append(cross_sync_path) -from transformers import ( # noqa: F401 E402 - SymbolReplacer, - AsyncToSync, +from transformers import ( + AsyncToSync, # noqa: F401 E402 + CrossSyncFileProcessor, RmAioFunctions, StripAsyncConditionalBranches, - CrossSyncFileProcessor, + SymbolReplacer, ) diff --git a/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py b/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py index 169e2396bdea..1416d6b7ab90 100644 --- a/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py +++ b/packages/google-cloud-bigtable/tests/system/data/setup_fixtures.py @@ -16,11 +16,12 @@ Bigtable database for testing purposes. """ -import pytest import os import uuid -from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY +import pytest + +from . import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2 # authorized view subset to allow all qualifiers ALLOW_ALL = "" @@ -43,10 +44,11 @@ def instance_id(admin_client, project_id, cluster_config): """ Returns BIGTABLE_TEST_INSTANCE if set, otherwise creates a new temporary instance for the test session """ - from google.cloud.bigtable_admin_v2 import types from google.api_core import exceptions from google.cloud.environment_vars import BIGTABLE_EMULATOR + from google.cloud.bigtable_admin_v2 import types + # use user-specified instance if available user_specified_instance = os.getenv("BIGTABLE_TEST_INSTANCE") if user_specified_instance: @@ -109,8 +111,7 @@ def table_id( Supplied by the init_table_id fixture. - column_split_config: A list of row keys to use as initial splits when creating the test table. 
""" - from google.api_core import exceptions - from google.api_core import retry + from google.api_core import exceptions, retry # use user-specified instance if available user_specified_table = os.getenv("BIGTABLE_TEST_TABLE") @@ -162,8 +163,7 @@ def authorized_view_id( - instance_id: The ID of the Bigtable instance to test against. Supplied by the instance_id fixture. - table_id: The ID of the table to create the authorized view for. Supplied by the table_id fixture. """ - from google.api_core import exceptions - from google.api_core import retry + from google.api_core import exceptions, retry retry = retry.Retry( predicate=retry.if_exception_type(exceptions.FailedPrecondition) diff --git a/packages/google-cloud-bigtable/tests/system/data/test_system_async.py b/packages/google-cloud-bigtable/tests/system/data/test_system_async.py index ac8a358a3ec1..3501eb9c0794 100644 --- a/packages/google-cloud-bigtable/tests/system/data/test_system_async.py +++ b/packages/google-cloud-bigtable/tests/system/data/test_system_async.py @@ -12,21 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import datetime -import uuid import os +import uuid + from google.api_core import retry from google.api_core.exceptions import ClientError, PermissionDenied - -from google.cloud.bigtable.data.execute_query.metadata import SqlType -from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.type import date_pb2 +import pytest from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.execute_query.metadata import SqlType +from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE -from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY +from . 
import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2 if CrossSync.is_async: from google.cloud.bigtable_v2.services.bigtable.transports.grpc_asyncio import ( @@ -413,9 +413,11 @@ async def test_bulk_mutations_raise_exception(self, client, target): """ If an invalid mutation is passed, an exception should be raised """ + from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, + ) from google.cloud.bigtable.data.mutations import RowMutationEntry, SetCell - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedMutationEntryError row_key = uuid.uuid4().hex.encode() mutation = SetCell( @@ -723,8 +725,10 @@ async def test_read_modify_write_row_chained(self, client, target, temp_rows): """ test read_modify_write_row with multiple rules """ - from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule - from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule + from google.cloud.bigtable.data.read_modify_write_rules import ( + AppendValueRule, + IncrementRule, + ) row_key = b"test-row-key" family = TEST_FAMILY @@ -893,8 +897,7 @@ async def test_read_rows_sharded_from_sample(self, target, temp_rows): """ Test end-to-end sharding """ - from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery - from google.cloud.bigtable.data.read_rows_query import RowRange + from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange await temp_rows.add_row(b"a") await temp_rows.add_row(b"b") @@ -949,8 +952,7 @@ async def test_read_rows_range_query(self, target, temp_rows): """ Ensure that the read_rows method works """ - from google.cloud.bigtable.data import ReadRowsQuery - from google.cloud.bigtable.data import RowRange + from google.cloud.bigtable.data import ReadRowsQuery, RowRange await temp_rows.add_row(b"a") await temp_rows.add_row(b"b") @@ -1143,8 +1145,8 @@ async def 
test_literal_value_filter( Literal value filter does complex escaping on re2 strings. Make sure inputs are properly interpreted by the server """ - from google.cloud.bigtable.data.row_filters import LiteralValueFilter from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data.row_filters import LiteralValueFilter f = LiteralValueFilter(filter_input) await temp_rows.add_row(b"row_key_1", value=cell_value) diff --git a/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py b/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py index 463235087487..1e3f7ac20f3f 100644 --- a/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py +++ b/packages/google-cloud-bigtable/tests/system/data/test_system_autogen.py @@ -15,22 +15,25 @@ # This file is automatically generated by CrossSync. Do not edit manually. -import pytest import datetime -import uuid import os +import uuid + from google.api_core import retry from google.api_core.exceptions import ClientError, PermissionDenied -from google.cloud.bigtable.data.execute_query.metadata import SqlType -from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.type import date_pb2 +import pytest + from google.cloud.bigtable.data._cross_sync import CrossSync -from . import TEST_FAMILY, TEST_FAMILY_2, TEST_AGGREGATE_FAMILY +from google.cloud.bigtable.data.execute_query.metadata import SqlType +from google.cloud.bigtable.data.read_modify_write_rules import _MAX_INCREMENT_VALUE from google.cloud.bigtable_v2.services.bigtable.transports.grpc import ( _LoggingClientInterceptor as GapicInterceptor, ) +from . 
import TEST_AGGREGATE_FAMILY, TEST_FAMILY, TEST_FAMILY_2 + TARGETS = ["table"] if not os.environ.get(BIGTABLE_EMULATOR): TARGETS.append("authorized_view") @@ -320,9 +323,11 @@ def test_bulk_mutations_set_cell(self, client, target, temp_rows): def test_bulk_mutations_raise_exception(self, client, target): """If an invalid mutation is passed, an exception should be raised""" + from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, + ) from google.cloud.bigtable.data.mutations import RowMutationEntry, SetCell - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedMutationEntryError row_key = uuid.uuid4().hex.encode() mutation = SetCell( @@ -573,8 +578,10 @@ def test_read_modify_write_row_append( @pytest.mark.usefixtures("target") def test_read_modify_write_row_chained(self, client, target, temp_rows): """test read_modify_write_row with multiple rules""" - from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule - from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule + from google.cloud.bigtable.data.read_modify_write_rules import ( + AppendValueRule, + IncrementRule, + ) row_key = b"test-row-key" family = TEST_FAMILY @@ -711,8 +718,7 @@ def test_read_rows_sharded_simple(self, target, temp_rows): ) def test_read_rows_sharded_from_sample(self, target, temp_rows): """Test end-to-end sharding""" - from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery - from google.cloud.bigtable.data.read_rows_query import RowRange + from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange temp_rows.add_row(b"a") temp_rows.add_row(b"b") @@ -759,8 +765,7 @@ def test_read_rows_sharded_filters_limits(self, target, temp_rows): ) def test_read_rows_range_query(self, target, temp_rows): """Ensure that the read_rows method works""" - from google.cloud.bigtable.data import ReadRowsQuery 
- from google.cloud.bigtable.data import RowRange + from google.cloud.bigtable.data import ReadRowsQuery, RowRange temp_rows.add_row(b"a") temp_rows.add_row(b"b") @@ -924,8 +929,8 @@ def test_literal_value_filter( ): """Literal value filter does complex escaping on re2 strings. Make sure inputs are properly interpreted by the server""" - from google.cloud.bigtable.data.row_filters import LiteralValueFilter from google.cloud.bigtable.data import ReadRowsQuery + from google.cloud.bigtable.data.row_filters import LiteralValueFilter f = LiteralValueFilter(filter_input) temp_rows.add_row(b"row_key_1", value=cell_value) diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py b/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py index e792def15914..695976681f9c 100644 --- a/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py +++ b/packages/google-cloud-bigtable/tests/system/v2_client/_helpers.py @@ -14,11 +14,11 @@ from datetime import datetime, timezone -import grpc from google.api_core import exceptions -from google.cloud import exceptions as core_exceptions +import grpc from test_utils import retry +from google.cloud import exceptions as core_exceptions retry_429 = retry.RetryErrors(exceptions.TooManyRequests, max_tries=9) retry_504 = retry.RetryErrors(exceptions.DeadlineExceeded) diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py b/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py index f39fcba88962..32ff310b5c59 100644 --- a/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py +++ b/packages/google-cloud-bigtable/tests/system/v2_client/conftest.py @@ -14,11 +14,11 @@ import os +from google.cloud.environment_vars import BIGTABLE_EMULATOR import pytest from test_utils.system import unique_resource_id from google.cloud.bigtable.client import Client -from google.cloud.environment_vars import BIGTABLE_EMULATOR from . 
import _helpers diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py b/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py index c012eb32a414..7c65ad3e9fe4 100644 --- a/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py +++ b/packages/google-cloud-bigtable/tests/system/v2_client/test_data_api.py @@ -158,8 +158,7 @@ def test_table_drop_by_prefix(data_table, rows_to_delete): def test_table_read_rows_w_row_set(data_table, rows_to_delete): - from google.cloud.bigtable.row_set import RowSet - from google.cloud.bigtable.row_set import RowRange + from google.cloud.bigtable.row_set import RowRange, RowSet row_keys = [ b"row_key_1", @@ -231,8 +230,11 @@ def test_table_read_row_large_cell(data_table, rows_to_delete, skip_on_emulator) def _write_to_row(row1, row2, row3, row4): - from google.cloud._helpers import _datetime_from_microseconds - from google.cloud._helpers import _microseconds_from_datetime + from google.cloud._helpers import ( + _datetime_from_microseconds, + _microseconds_from_datetime, + ) + from google.cloud.bigtable.row_data import Cell timestamp1 = datetime.now(timezone.utc) @@ -325,10 +327,12 @@ def test_table_read_rows(data_table, rows_to_delete): def test_read_with_label_applied(data_table, rows_to_delete, skip_on_emulator): - from google.cloud.bigtable.row_filters import ApplyLabelFilter - from google.cloud.bigtable.row_filters import ColumnQualifierRegexFilter - from google.cloud.bigtable.row_filters import RowFilterChain - from google.cloud.bigtable.row_filters import RowFilterUnion + from google.cloud.bigtable.row_filters import ( + ApplyLabelFilter, + ColumnQualifierRegexFilter, + RowFilterChain, + RowFilterUnion, + ) row = data_table.direct_row(ROW_KEY) rows_to_delete.append(row) @@ -387,8 +391,10 @@ def test_mutations_batcher_threading(data_table, rows_to_delete): Test the mutations batcher by sending a bunch of mutations using different flush methods """ - import mock import 
time + + import mock + from google.cloud.bigtable.batcher import MutationsBatcher num_sent = 20 diff --git a/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py b/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py index c501890137a3..b6e02f361cdd 100644 --- a/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py +++ b/packages/google-cloud-bigtable/tests/system/v2_client/test_table_admin.py @@ -16,8 +16,8 @@ import operator import time -import pytest from google.api_core.datetime_helpers import DatetimeWithNanoseconds +import pytest from . import _helpers @@ -219,8 +219,7 @@ def test_table_get_iam_policy( def test_table_set_iam_policy( service_account, data_instance_populated, tables_to_delete, skip_on_emulator ): - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - from google.cloud.bigtable.policy import Policy + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy temp_table_id = "test-set-iam-policy-table" temp_table = data_instance_populated.table(temp_table_id) @@ -264,6 +263,7 @@ def test_table_backup( skip_on_emulator, ): from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable import enums temp_table_id = "test-backup-table" diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py index 0d844a9e4ced..bd70826c0d39 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_client.py @@ -20,30 +20,23 @@ except ImportError: # pragma: NO COVER import mock -from google.api_core import exceptions -from google.api_core import gapic_v1 +from google.api_core import exceptions, gapic_v1 from google.api_core import retry as retries from google.auth.credentials import AnonymousCredentials -from 
google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin +import pytest +from test_async_consistency import FALSE_CONSISTENCY_RESPONSE, TRUE_CONSISTENCY_RESPONSE + +from google.cloud.bigtable import __version__ as bigtable_version from google.cloud.bigtable_admin_v2.overlay.services.bigtable_table_admin.async_client import ( - BigtableTableAdminAsyncClient, DEFAULT_CLIENT_INFO, + BigtableTableAdminAsyncClient, ) from google.cloud.bigtable_admin_v2.overlay.types import ( async_restore_table, wait_for_consistency_request, ) - -from google.cloud.bigtable import __version__ as bigtable_version - -from test_async_consistency import ( - FALSE_CONSISTENCY_RESPONSE, - TRUE_CONSISTENCY_RESPONSE, -) - -import pytest - +from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin PARENT_NAME = "my_parent" TABLE_NAME = "my_table" diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py index b64ae1a117ff..d4a787fddd33 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_consistency.py @@ -20,11 +20,10 @@ except ImportError: # pragma: NO COVER import mock -from google.cloud.bigtable_admin_v2.overlay.types import async_consistency -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin - import pytest +from google.cloud.bigtable_admin_v2.overlay.types import async_consistency +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin TRUE_CONSISTENCY_RESPONSE = bigtable_table_admin.CheckConsistencyResponse( consistent=True diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py 
b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py index 95799fc147a4..b11891d8f9ea 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_async_restore_table.py @@ -19,17 +19,15 @@ except ImportError: # pragma: NO COVER import mock -from google.longrunning import operations_pb2 -from google.rpc import status_pb2, code_pb2 - -from google.api_core import operation_async, exceptions +from google.api_core import exceptions, operation_async from google.api_core.future import async_future from google.api_core.operations_v1 import operations_async_client -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table -from google.cloud.bigtable_admin_v2.overlay.types import async_restore_table - +from google.longrunning import operations_pb2 +from google.rpc import code_pb2, status_pb2 import pytest +from google.cloud.bigtable_admin_v2.overlay.types import async_restore_table +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table # Set up the mock operations DEFAULT_MAX_POLL = 3 diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py index 07922b349458..7946499df31e 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_client.py @@ -19,30 +19,23 @@ except ImportError: # pragma: NO COVER import mock -from google.api_core import exceptions -from google.api_core import gapic_v1 +from google.api_core import exceptions, gapic_v1 from google.api_core import retry as retries from google.auth.credentials import AnonymousCredentials -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin +import pytest +from 
test_consistency import FALSE_CONSISTENCY_RESPONSE, TRUE_CONSISTENCY_RESPONSE + +from google.cloud.bigtable import __version__ as bigtable_version from google.cloud.bigtable_admin_v2.overlay.services.bigtable_table_admin.client import ( - BigtableTableAdminClient, DEFAULT_CLIENT_INFO, + BigtableTableAdminClient, ) from google.cloud.bigtable_admin_v2.overlay.types import ( restore_table, wait_for_consistency_request, ) - -from google.cloud.bigtable import __version__ as bigtable_version - -from test_consistency import ( - FALSE_CONSISTENCY_RESPONSE, - TRUE_CONSISTENCY_RESPONSE, -) - -import pytest - +from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin PARENT_NAME = "my_parent" TABLE_NAME = "my_table" diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py index 29bc0c4817ac..92e941773e0f 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_consistency.py @@ -19,11 +19,10 @@ except ImportError: # pragma: NO COVER import mock -from google.cloud.bigtable_admin_v2.overlay.types import consistency -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin - import pytest +from google.cloud.bigtable_admin_v2.overlay.types import consistency +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin TRUE_CONSISTENCY_RESPONSE = bigtable_table_admin.CheckConsistencyResponse( consistent=True diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py index b9c521235caf..2619e6860e4b 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py +++ 
b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_oneof_message.py @@ -13,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.bigtable_admin_v2.types import GcRule from google.protobuf import duration_pb2 - import my_oneof_message - import pytest +from google.cloud.bigtable_admin_v2.types import GcRule # The following proto bytestring was constructed running printproto in # text-to-binary mode on the following textproto for GcRule: diff --git a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py index 23c6609e46d4..b56d1dd45e4d 100644 --- a/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py +++ b/packages/google-cloud-bigtable/tests/unit/admin_overlay/test_restore_table.py @@ -18,16 +18,14 @@ except ImportError: # pragma: NO COVER import mock -from google.longrunning import operations_pb2 -from google.rpc import status_pb2, code_pb2 - -from google.api_core import operation, exceptions +from google.api_core import exceptions, operation from google.api_core.operations_v1 import operations_client -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table -from google.cloud.bigtable_admin_v2.overlay.types import restore_table - +from google.longrunning import operations_pb2 +from google.rpc import code_pb2, status_pb2 import pytest +from google.cloud.bigtable_admin_v2.overlay.types import restore_table +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin, table # Set up the mock operations DEFAULT_MAX_POLL = 3 diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py index f14fa6dee12a..d7029625b54f 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py +++ 
b/packages/google-cloud-bigtable/tests/unit/data/_async/test__mutate_rows.py @@ -12,16 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest - -from google.cloud.bigtable_v2.types import MutateRowsResponse -from google.cloud.bigtable.data.mutations import RowMutationEntry -from google.cloud.bigtable.data.mutations import DeleteAllFromRow +from google.api_core.exceptions import DeadlineExceeded, Forbidden from google.rpc import status_pb2 -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import Forbidden +import pytest from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry +from google.cloud.bigtable_v2.types import MutateRowsResponse # try/except added for compatibility with python < 3.8 try: @@ -80,10 +77,10 @@ def test_ctor(self): """ test that constructor sets all the attributes correctly """ + from google.api_core.exceptions import Aborted, DeadlineExceeded + from google.cloud.bigtable.data._async._mutate_rows import _EntryWithProto from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete - from google.api_core.exceptions import DeadlineExceeded - from google.api_core.exceptions import Aborted client = mock.Mock() table = mock.Mock() @@ -198,8 +195,10 @@ async def test_mutate_rows_exception(self, exc_type): """ exceptions raised from retryable should be raised in MutationsExceptionGroup """ - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedMutationEntryError + from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, + ) client = mock.Mock() table = mock.Mock() @@ -265,10 +264,13 @@ async def test_mutate_rows_incomplete_ignored(self): """ MutateRowsIncomplete exceptions should not be added to error list """ - from 
google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import ( + MutationsExceptionGroup, + _MutateRowsIncomplete, + ) + client = mock.Mock() table = mock.Mock() entries = [self._make_mutation()] diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py index c43f46d5a66b..7fad973c43a3 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test__read_rows.py @@ -96,9 +96,9 @@ def test_ctor(self): def test_revise_request_rowset_keys_with_range( self, in_keys, last_key, expected, with_range ): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB - from google.cloud.bigtable_v2.types import RowRange as RowRangePB from google.cloud.bigtable.data.exceptions import _RowSetComplete + from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB in_keys = [key.encode("utf-8") for key in in_keys] expected = [key.encode("utf-8") for key in expected] @@ -167,9 +167,9 @@ def test_revise_request_rowset_keys_with_range( def test_revise_request_rowset_ranges( self, in_ranges, last_key, expected, with_key ): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB - from google.cloud.bigtable_v2.types import RowRange as RowRangePB from google.cloud.bigtable.data.exceptions import _RowSetComplete + from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB # convert to protobuf next_key = (last_key + "a").encode("utf-8") @@ -199,8 +199,8 @@ def test_revise_request_rowset_ranges( @pytest.mark.parametrize("last_key", ["a", "b", "c"]) def 
test_revise_request_full_table(self, last_key): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB # convert to protobuf last_key = last_key.encode("utf-8") @@ -216,8 +216,8 @@ def test_revise_request_full_table(self, last_key): def test_revise_to_empty_rowset(self): """revising to an empty rowset should raise error""" from google.cloud.bigtable.data.exceptions import _RowSetComplete - from google.cloud.bigtable_v2.types import RowSet as RowSetPB from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB row_keys = [b"a", b"b", b"c"] row_range = RowRangePB(end_key_open=b"c") @@ -284,8 +284,8 @@ async def test_revise_limit_over_limit(self, start_limit, emit_num): (unless start_num == 0, which represents unlimited) """ from google.cloud.bigtable.data import ReadRowsQuery - from google.cloud.bigtable_v2.types import ReadRowsResponse from google.cloud.bigtable.data.exceptions import InvalidChunk + from google.cloud.bigtable_v2.types import ReadRowsResponse async def awaitable_stream(): async def mock_stream(): diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test__swappable_channel.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test__swappable_channel.py index 14fef2c85944..01c08a9aafc1 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test__swappable_channel.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test__swappable_channel.py @@ -18,8 +18,8 @@ except ImportError: # pragma: NO COVER import mock # type: ignore -import pytest from grpc import ChannelConnectivity +import pytest from google.cloud.bigtable.data._cross_sync import CrossSync diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py index 
9f65d120bba6..6de29b55ace4 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_client.py @@ -13,30 +13,28 @@ # limitations under the License. from __future__ import annotations -import grpc import asyncio import re import sys -import pytest +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.auth.credentials import AnonymousCredentials +import grpc import mock +import pytest -from google.cloud.bigtable.data import mutations -from google.auth.credentials import AnonymousCredentials -from google.cloud.bigtable_v2.types import ReadRowsResponse -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.api_core import exceptions as core_exceptions -from google.api_core import client_options -from google.cloud.bigtable.data.exceptions import InvalidChunk -from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete +from google.cloud.bigtable.data import TABLE_DEFAULT, mutations +from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.exceptions import InvalidChunk, _MutateRowsIncomplete from google.cloud.bigtable.data.mutations import DeleteAllFromRow -from google.cloud.bigtable.data import TABLE_DEFAULT - -from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule -from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule +from google.cloud.bigtable.data.read_modify_write_rules import ( + AppendValueRule, + IncrementRule, +) +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable_v2.types import ReadRowsResponse from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse - -from google.cloud.bigtable.data._cross_sync import CrossSync from tests.unit.data.execute_query.sql_helpers import ( chunked_responses, column, @@ -51,10 +49,11 @@ if 
CrossSync.is_async: from google.api_core import grpc_helpers_async - from google.cloud.bigtable.data._async.client import TableAsync + from google.cloud.bigtable.data._async._swappable_channel import ( AsyncSwappableChannel, ) + from google.cloud.bigtable.data._async.client import TableAsync from google.cloud.bigtable.data._async.metrics_interceptor import ( AsyncBigtableMetricsInterceptor, ) @@ -64,10 +63,11 @@ CrossSync.add_mapping("MetricsInterceptor", AsyncBigtableMetricsInterceptor) else: from google.api_core import grpc_helpers - from google.cloud.bigtable.data._sync_autogen.client import Table # noqa: F401 + from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( SwappableChannel, ) + from google.cloud.bigtable.data._sync_autogen.client import Table # noqa: F401 from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( BigtableMetricsInterceptor, ) @@ -127,8 +127,8 @@ async def test_ctor(self): @CrossSync.pytest async def test_ctor_super_inits(self): - from google.cloud.client import ClientWithProject from google.api_core import client_options as client_options_lib + from google.cloud.client import ClientWithProject project = "project-id" credentials = AnonymousCredentials() @@ -441,8 +441,8 @@ async def test__manage_channel_sleeps( self, refresh_interval, num_cycles, expected_sleep ): # make sure that sleeps work as expected - import time import random + import time with mock.patch.object(random, "uniform") as uniform: uniform.side_effect = lambda min_, max_: min_ @@ -923,6 +923,7 @@ async def test_api_surface_context_manager(self, method): get_table and get_authorized_view should work as context managers """ from functools import partial + from google.cloud.bigtable.data._helpers import _WarmedInstanceKey expected_table_id = "table-id" @@ -1628,9 +1629,11 @@ def _make_table(self, *args, **kwargs): return CrossSync.TestTable._get_target_class()(client_mock, *args, **kwargs) def _make_stats(self): - from 
google.cloud.bigtable_v2.types import RequestStats - from google.cloud.bigtable_v2.types import FullReadStatsView - from google.cloud.bigtable_v2.types import ReadIterationStats + from google.cloud.bigtable_v2.types import ( + FullReadStatsView, + ReadIterationStats, + RequestStats, + ) return RequestStats( full_read_stats_view=FullReadStatsView( @@ -2156,8 +2159,10 @@ async def test_read_rows_sharded_errors(self): """ Errors should be exposed as ShardedReadRowsExceptionGroups """ - from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedQueryShardError + from google.cloud.bigtable.data.exceptions import ( + FailedQueryShardError, + ShardedReadRowsExceptionGroup, + ) async with self._make_client() as client: async with client.get_table("instance", "table") as table: @@ -2258,9 +2263,10 @@ async def test_read_rows_sharded_expirary(self): If the operation times out before all shards complete, should raise a ShardedReadRowsExceptionGroup """ + from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.api_core.exceptions import DeadlineExceeded operation_timeout = 0.1 @@ -2299,10 +2305,11 @@ async def test_read_rows_sharded_negative_batch_timeout(self): They should raise DeadlineExceeded errors """ - from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT + from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup + async def mock_call(*args, **kwargs): await CrossSync.sleep(0.06) return [mock.Mock()] @@ -2438,6 +2445,7 @@ async def test_sample_row_keys_retryable_errors(self, 
retryable_exception): retryable errors should be retried until timeout """ from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import RetryExceptionGroup async with self._make_client() as client: @@ -2545,6 +2553,7 @@ async def test_mutate_row(self, mutation_arg): @CrossSync.pytest async def test_mutate_row_retryable_errors(self, retryable_exception): from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import RetryExceptionGroup async with self._make_client(project="project") as client: @@ -2641,9 +2650,10 @@ def _make_client(self, *args, **kwargs): @CrossSync.convert async def _mock_response(self, response_list): - from google.cloud.bigtable_v2.types import MutateRowsResponse from google.rpc import status_pb2 + from google.cloud.bigtable_v2.types import MutateRowsResponse + statuses = [] for response in response_list: if isinstance(response, core_exceptions.GoogleAPICallError): @@ -2751,9 +2761,9 @@ async def test_bulk_mutate_rows_idempotent_mutation_error_retryable( Individual idempotent mutations should be retried if they fail with a retryable error """ from google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) async with self._make_client(project="project") as client: @@ -2837,9 +2847,9 @@ async def test_bulk_mutate_idempotent_retryable_request_errors( Individual idempotent mutations should be retried if the request fails with a retryable error """ from google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) async with self._make_client(project="project") as client: @@ -2949,13 +2959,14 @@ async def test_bulk_mutate_error_index(self): """ from google.api_core.exceptions import ( DeadlineExceeded, - ServiceUnavailable, FailedPrecondition, + ServiceUnavailable, ) + from 
google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) async with self._make_client(project="project") as client: @@ -3141,8 +3152,8 @@ async def test_check_and_mutate_predicate_object(self): @CrossSync.pytest async def test_check_and_mutate_mutations_parsing(self): """mutations objects should be converted to protos""" - from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse from google.cloud.bigtable.data.mutations import DeleteAllFromFamily + from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse mutations = [mock.Mock() for _ in range(5)] for idx, mutation in enumerate(mutations): diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py index 1593b8c99acd..5c7c5851757b 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_metrics_interceptor.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from grpc import ClientCallDetails, RpcError import pytest -from grpc import RpcError -from grpc import ClientCallDetails -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import OperationState from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + OperationState, +) # try/except added for compatibility with python < 3.8 try: @@ -31,9 +32,9 @@ AsyncBigtableMetricsInterceptor, ) else: - from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( # noqa: F401 + from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( BigtableMetricsInterceptor, - ) + ) # noqa: F401 __CROSS_SYNC_OUTPUT__ = "tests.unit.data._sync_autogen.test_metrics_interceptor" diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py b/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py index b139f31f1c7c..75de7c281332 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_mutations_batcher.py @@ -12,18 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest -import mock import asyncio import time + import google.api_core.exceptions as core_exceptions import google.api_core.retry -from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete -from google.cloud.bigtable.data.mutations import RowMutationEntry -from google.cloud.bigtable.data.mutations import DeleteAllFromRow -from google.cloud.bigtable.data import TABLE_DEFAULT +import mock +import pytest +from google.cloud.bigtable.data import TABLE_DEFAULT from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete +from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry __CROSS_SYNC_OUTPUT__ = "tests.unit.data._sync_autogen.test_mutations_batcher" @@ -305,8 +305,7 @@ def _get_target_class(self): return CrossSync.MutationsBatcher def _make_one(self, table=None, **kwargs): - from google.api_core.exceptions import DeadlineExceeded - from google.api_core.exceptions import ServiceUnavailable + from google.api_core.exceptions import DeadlineExceeded, ServiceUnavailable if table is None: table = mock.Mock() @@ -887,9 +886,10 @@ async def gen(x): @CrossSync.convert async def _mock_gapic_return(self, num=5): - from google.cloud.bigtable_v2.types import MutateRowsResponse from google.rpc import status_pb2 + from google.cloud.bigtable_v2.types import MutateRowsResponse + @CrossSync.convert async def gen(num): for i in range(num): @@ -949,8 +949,8 @@ async def test__execute_mutate_rows(self): async def test__execute_mutate_rows_returns_errors(self): """Errors from operation should be retruned as list""" from google.cloud.bigtable.data.exceptions import ( - MutationsExceptionGroup, FailedMutationEntryError, + MutationsExceptionGroup, ) with mock.patch.object(CrossSync._MutateRowsOperation, "start") as mutate_rows: diff --git a/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py 
b/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py index ab9502223c21..bf7815270fe6 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_async/test_read_rows_acceptance.py @@ -13,23 +13,20 @@ # limitations under the License. from __future__ import annotations +from itertools import zip_longest import os import warnings -import pytest -import mock -from itertools import zip_longest - -from google.cloud.bigtable_v2 import ReadRowsResponse +import mock +import pytest +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data.exceptions import InvalidChunk from google.cloud.bigtable.data.row import Row +from google.cloud.bigtable_v2 import ReadRowsResponse from ...v2_client.test_row_merger import ReadRowsTest, TestFile -from google.cloud.bigtable.data._cross_sync import CrossSync - - __CROSS_SYNC_OUTPUT__ = "tests.unit.data._sync_autogen.test_read_rows_acceptance" diff --git a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py index 410f59437711..4efc4179deb4 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync.py @@ -11,16 +11,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import typing import asyncio -import pytest -import pytest_asyncio -import threading import concurrent.futures -import time -import queue import functools +import queue import sys +import threading +import time +import typing + +import pytest +import pytest_asyncio + from google import api_core from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync, T diff --git a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py index 3be579379597..26640a959f11 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_cross_sync/test_cross_sync_decorators.py @@ -12,18 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest -import pytest_asyncio import ast from unittest import mock -from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync + +import pytest +import pytest_asyncio + from google.cloud.bigtable.data._cross_sync._decorators import ( - ConvertClass, Convert, + ConvertClass, Drop, Pytest, PytestFixture, ) +from google.cloud.bigtable.data._cross_sync.cross_sync import CrossSync @pytest.fixture diff --git a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py index 93e73c9d8603..307d8ba4ce87 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_data_model.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import mock +import pytest from google.cloud.bigtable.data._metrics.data_model import OperationState as State from google.cloud.bigtable_v2.types import ResponseParams @@ -627,8 +627,9 @@ def test__exc_to_status(self): If BigtableExceptionGroup, use the most recent exception in the group """ - from grpc import StatusCode from google.api_core import exceptions as core_exc + from grpc import StatusCode + from google.cloud.bigtable.data import exceptions as bt_exc cls = type(self._make_one(object())) diff --git a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py index 39713dc694f5..1260f289d8f8 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_metrics/test_tracked_retry.py @@ -12,14 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import inspect -import mock import sys -from grpc import StatusCode + from google.api_core import exceptions as core_exceptions -from google.api_core.retry import RetryFailureReason import google.api_core.retry as retry_module +from google.api_core.retry import RetryFailureReason +from grpc import StatusCode +import mock +import pytest class TestTrackRetryableError: diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py index b198df01b9c8..f2dd9ff03dc8 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__mutate_rows.py @@ -15,14 +15,13 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
-import pytest -from google.cloud.bigtable_v2.types import MutateRowsResponse -from google.cloud.bigtable.data.mutations import RowMutationEntry -from google.cloud.bigtable.data.mutations import DeleteAllFromRow +from google.api_core.exceptions import DeadlineExceeded, Forbidden from google.rpc import status_pb2 -from google.api_core.exceptions import DeadlineExceeded -from google.api_core.exceptions import Forbidden +import pytest + from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry +from google.cloud.bigtable_v2.types import MutateRowsResponse try: from unittest import mock @@ -74,10 +73,10 @@ def _make_mock_gapic(self, mutation_list, error_dict=None): def test_ctor(self): """test that constructor sets all the attributes correctly""" + from google.api_core.exceptions import Aborted, DeadlineExceeded + from google.cloud.bigtable.data._async._mutate_rows import _EntryWithProto from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete - from google.api_core.exceptions import DeadlineExceeded - from google.api_core.exceptions import Aborted client = mock.Mock() table = mock.Mock() @@ -170,8 +169,10 @@ def test_mutate_rows_attempt_exception(self, exc_type): @pytest.mark.parametrize("exc_type", [RuntimeError, ZeroDivisionError, Forbidden]) def test_mutate_rows_exception(self, exc_type): """exceptions raised from retryable should be raised in MutationsExceptionGroup""" - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedMutationEntryError + from google.cloud.bigtable.data.exceptions import ( + FailedMutationEntryError, + MutationsExceptionGroup, + ) client = mock.Mock() table = mock.Mock() @@ -223,10 +224,13 @@ def test_mutate_rows_exception_retryable_eventually_pass(self, exc_type): def test_mutate_rows_incomplete_ignored(self): """MutateRowsIncomplete exceptions should not be 
added to error list""" - from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete - from google.cloud.bigtable.data.exceptions import MutationsExceptionGroup from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import ( + MutationsExceptionGroup, + _MutateRowsIncomplete, + ) + client = mock.Mock() table = mock.Mock() entries = [self._make_mutation()] diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py index a545142d3dfb..a9744c5e9941 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__read_rows.py @@ -16,6 +16,7 @@ # This file is automatically generated by CrossSync. Do not edit manually. import pytest + from google.cloud.bigtable.data._cross_sync import CrossSync try: @@ -90,9 +91,9 @@ def test_ctor(self): def test_revise_request_rowset_keys_with_range( self, in_keys, last_key, expected, with_range ): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB - from google.cloud.bigtable_v2.types import RowRange as RowRangePB from google.cloud.bigtable.data.exceptions import _RowSetComplete + from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB in_keys = [key.encode("utf-8") for key in in_keys] expected = [key.encode("utf-8") for key in expected] @@ -159,9 +160,9 @@ def test_revise_request_rowset_keys_with_range( def test_revise_request_rowset_ranges( self, in_ranges, last_key, expected, with_key ): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB - from google.cloud.bigtable_v2.types import RowRange as RowRangePB from google.cloud.bigtable.data.exceptions import _RowSetComplete + from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from 
google.cloud.bigtable_v2.types import RowSet as RowSetPB next_key = (last_key + "a").encode("utf-8") last_key = last_key.encode("utf-8") @@ -188,8 +189,8 @@ def test_revise_request_rowset_ranges( @pytest.mark.parametrize("last_key", ["a", "b", "c"]) def test_revise_request_full_table(self, last_key): - from google.cloud.bigtable_v2.types import RowSet as RowSetPB from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB last_key = last_key.encode("utf-8") row_set = RowSetPB() @@ -204,8 +205,8 @@ def test_revise_request_full_table(self, last_key): def test_revise_to_empty_rowset(self): """revising to an empty rowset should raise error""" from google.cloud.bigtable.data.exceptions import _RowSetComplete - from google.cloud.bigtable_v2.types import RowSet as RowSetPB from google.cloud.bigtable_v2.types import RowRange as RowRangePB + from google.cloud.bigtable_v2.types import RowSet as RowSetPB row_keys = [b"a", b"b", b"c"] row_range = RowRangePB(end_key_open=b"c") @@ -265,8 +266,8 @@ def test_revise_limit_over_limit(self, start_limit, emit_num): """Should raise runtime error if we get in state where emit_num > start_num (unless start_num == 0, which represents unlimited)""" from google.cloud.bigtable.data import ReadRowsQuery - from google.cloud.bigtable_v2.types import ReadRowsResponse from google.cloud.bigtable.data.exceptions import InvalidChunk + from google.cloud.bigtable_v2.types import ReadRowsResponse def awaitable_stream(): def mock_stream(): diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__swappable_channel.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__swappable_channel.py index 04f3f61c8d86..fe3ddd3e31dd 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__swappable_channel.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test__swappable_channel.py @@ -20,8 +20,10 @@ from unittest 
import mock except ImportError: import mock -import pytest + from grpc import ChannelConnectivity +import pytest + from google.cloud.bigtable.data._sync_autogen._swappable_channel import ( SwappableChannel as TargetType, ) diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py index 54be1f17c1b4..dca5c406824e 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_client.py @@ -15,25 +15,33 @@ # This file is automatically generated by CrossSync. Do not edit manually. from __future__ import annotations -import grpc + import asyncio import re -import pytest -import mock -from google.cloud.bigtable.data import mutations -from google.auth.credentials import AnonymousCredentials -from google.cloud.bigtable_v2.types import ReadRowsResponse -from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery -from google.api_core import exceptions as core_exceptions + from google.api_core import client_options -from google.cloud.bigtable.data.exceptions import InvalidChunk -from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete +from google.api_core import exceptions as core_exceptions +from google.api_core import grpc_helpers +from google.auth.credentials import AnonymousCredentials +import grpc +import mock +import pytest + +from google.cloud.bigtable.data import TABLE_DEFAULT, mutations +from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._sync_autogen._swappable_channel import SwappableChannel +from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( + BigtableMetricsInterceptor, +) +from google.cloud.bigtable.data.exceptions import InvalidChunk, _MutateRowsIncomplete from google.cloud.bigtable.data.mutations import DeleteAllFromRow -from google.cloud.bigtable.data import 
TABLE_DEFAULT -from google.cloud.bigtable.data.read_modify_write_rules import IncrementRule -from google.cloud.bigtable.data.read_modify_write_rules import AppendValueRule +from google.cloud.bigtable.data.read_modify_write_rules import ( + AppendValueRule, + IncrementRule, +) +from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery +from google.cloud.bigtable_v2.types import ReadRowsResponse from google.cloud.bigtable_v2.types.bigtable import ExecuteQueryResponse -from google.cloud.bigtable.data._cross_sync import CrossSync from tests.unit.data.execute_query.sql_helpers import ( chunked_responses, column, @@ -45,11 +53,6 @@ str_type, str_val, ) -from google.api_core import grpc_helpers -from google.cloud.bigtable.data._sync_autogen._swappable_channel import SwappableChannel -from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( - BigtableMetricsInterceptor, -) CrossSync._Sync_Impl.add_mapping("grpc_helpers", grpc_helpers) CrossSync._Sync_Impl.add_mapping("SwappableChannel", SwappableChannel) @@ -95,8 +98,8 @@ def test_ctor(self): client.close() def test_ctor_super_inits(self): - from google.cloud.client import ClientWithProject from google.api_core import client_options as client_options_lib + from google.cloud.client import ClientWithProject project = "project-id" credentials = AnonymousCredentials() @@ -338,8 +341,8 @@ def test__manage_channel_ping_and_warm(self): [(None, 1, 60 * 35), (10, 10, 100), (10, 1, 10)], ) def test__manage_channel_sleeps(self, refresh_interval, num_cycles, expected_sleep): - import time import random + import time with mock.patch.object(random, "uniform") as uniform: uniform.side_effect = lambda min_, max_: min_ @@ -750,6 +753,7 @@ def test_api_surface_arg_passthrough(self, method): def test_api_surface_context_manager(self, method): """get_table and get_authorized_view should work as context managers""" from functools import partial + from google.cloud.bigtable.data._helpers import _WarmedInstanceKey 
expected_table_id = "table-id" @@ -1326,9 +1330,11 @@ def _make_table(self, *args, **kwargs): ) def _make_stats(self): - from google.cloud.bigtable_v2.types import RequestStats - from google.cloud.bigtable_v2.types import FullReadStatsView - from google.cloud.bigtable_v2.types import ReadIterationStats + from google.cloud.bigtable_v2.types import ( + FullReadStatsView, + ReadIterationStats, + RequestStats, + ) return RequestStats( full_read_stats_view=FullReadStatsView( @@ -1796,8 +1802,10 @@ def test_read_rows_sharded_multiple_queries_calls(self, n_queries): def test_read_rows_sharded_errors(self): """Errors should be exposed as ShardedReadRowsExceptionGroups""" - from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.cloud.bigtable.data.exceptions import FailedQueryShardError + from google.cloud.bigtable.data.exceptions import ( + FailedQueryShardError, + ShardedReadRowsExceptionGroup, + ) with self._make_client() as client: with client.get_table("instance", "table") as table: @@ -1879,9 +1887,10 @@ def mock_call(*args, **kwargs): def test_read_rows_sharded_expirary(self): """If the operation times out before all shards complete, should raise a ShardedReadRowsExceptionGroup""" + from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.api_core.exceptions import DeadlineExceeded operation_timeout = 0.1 num_queries = 15 @@ -1914,10 +1923,11 @@ def test_read_rows_sharded_negative_batch_timeout(self): """try to run with batch that starts after operation timeout They should raise DeadlineExceeded errors""" - from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup - from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data._helpers import 
_CONCURRENCY_LIMIT + from google.cloud.bigtable.data.exceptions import ShardedReadRowsExceptionGroup + def mock_call(*args, **kwargs): CrossSync._Sync_Impl.sleep(0.06) return [mock.Mock()] @@ -2036,6 +2046,7 @@ def test_sample_row_keys_gapic_params(self): def test_sample_row_keys_retryable_errors(self, retryable_exception): """retryable errors should be retried until timeout""" from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import RetryExceptionGroup with self._make_client() as client: @@ -2136,6 +2147,7 @@ def test_mutate_row(self, mutation_arg): ) def test_mutate_row_retryable_errors(self, retryable_exception): from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.data.exceptions import RetryExceptionGroup with self._make_client(project="project") as client: @@ -2213,9 +2225,10 @@ def _make_client(self, *args, **kwargs): return CrossSync._Sync_Impl.TestBigtableDataClient._make_client(*args, **kwargs) def _mock_response(self, response_list): - from google.cloud.bigtable_v2.types import MutateRowsResponse from google.rpc import status_pb2 + from google.cloud.bigtable_v2.types import MutateRowsResponse + statuses = [] for response in response_list: if isinstance(response, core_exceptions.GoogleAPICallError): @@ -2308,9 +2321,9 @@ def test_bulk_mutate_rows_multiple_entries(self): def test_bulk_mutate_rows_idempotent_mutation_error_retryable(self, exception): """Individual idempotent mutations should be retried if they fail with a retryable error""" from google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) with self._make_client(project="project") as client: @@ -2380,9 +2393,9 @@ def test_bulk_mutate_rows_idempotent_mutation_error_non_retryable(self, exceptio def test_bulk_mutate_idempotent_retryable_request_errors(self, retryable_exception): """Individual idempotent mutations should be 
retried if the request fails with a retryable error""" from google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) with self._make_client(project="project") as client: @@ -2482,13 +2495,14 @@ def test_bulk_mutate_error_index(self): """Test partial failure, partial success. Errors should be associated with the correct index""" from google.api_core.exceptions import ( DeadlineExceeded, - ServiceUnavailable, FailedPrecondition, + ServiceUnavailable, ) + from google.cloud.bigtable.data.exceptions import ( - RetryExceptionGroup, FailedMutationEntryError, MutationsExceptionGroup, + RetryExceptionGroup, ) with self._make_client(project="project") as client: @@ -2662,8 +2676,8 @@ def test_check_and_mutate_predicate_object(self): def test_check_and_mutate_mutations_parsing(self): """mutations objects should be converted to protos""" - from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse from google.cloud.bigtable.data.mutations import DeleteAllFromFamily + from google.cloud.bigtable_v2.types import CheckAndMutateRowResponse mutations = [mock.Mock() for _ in range(5)] for idx, mutation in enumerate(mutations): diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py index c4efcc5b96c3..c86ea7d3449c 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_metrics_interceptor.py @@ -15,17 +15,20 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
+from grpc import ClientCallDetails, RpcError import pytest -from grpc import RpcError -from grpc import ClientCallDetails -from google.cloud.bigtable.data._metrics.data_model import ActiveOperationMetric -from google.cloud.bigtable.data._metrics.data_model import OperationState + from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data._metrics.data_model import ( + ActiveOperationMetric, + OperationState, +) try: from unittest import mock except ImportError: import mock + from google.cloud.bigtable.data._sync_autogen.metrics_interceptor import ( BigtableMetricsInterceptor, ) diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py index 92d16b349273..c1ee196926bf 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_mutations_batcher.py @@ -15,17 +15,18 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
-import pytest -import mock import asyncio import time + import google.api_core.exceptions as core_exceptions import google.api_core.retry -from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete -from google.cloud.bigtable.data.mutations import RowMutationEntry -from google.cloud.bigtable.data.mutations import DeleteAllFromRow +import mock +import pytest + from google.cloud.bigtable.data import TABLE_DEFAULT from google.cloud.bigtable.data._cross_sync import CrossSync +from google.cloud.bigtable.data.exceptions import _MutateRowsIncomplete +from google.cloud.bigtable.data.mutations import DeleteAllFromRow, RowMutationEntry class Test_FlowControl: @@ -255,8 +256,7 @@ def _get_target_class(self): return CrossSync._Sync_Impl.MutationsBatcher def _make_one(self, table=None, **kwargs): - from google.api_core.exceptions import DeadlineExceeded - from google.api_core.exceptions import ServiceUnavailable + from google.api_core.exceptions import DeadlineExceeded, ServiceUnavailable if table is None: table = mock.Mock() @@ -770,9 +770,10 @@ def gen(x): instance._newest_exceptions.clear() def _mock_gapic_return(self, num=5): - from google.cloud.bigtable_v2.types import MutateRowsResponse from google.rpc import status_pb2 + from google.cloud.bigtable_v2.types import MutateRowsResponse + def gen(num): for i in range(num): entry = MutateRowsResponse.Entry( @@ -828,8 +829,8 @@ def test__execute_mutate_rows(self): def test__execute_mutate_rows_returns_errors(self): """Errors from operation should be retruned as list""" from google.cloud.bigtable.data.exceptions import ( - MutationsExceptionGroup, FailedMutationEntryError, + MutationsExceptionGroup, ) with mock.patch.object( diff --git a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py index 8ceb0daf764d..60c6a347bda4 100644 --- 
a/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py +++ b/packages/google-cloud-bigtable/tests/unit/data/_sync_autogen/test_read_rows_acceptance.py @@ -15,16 +15,20 @@ # This file is automatically generated by CrossSync. Do not edit manually. from __future__ import annotations + +from itertools import zip_longest import os import warnings -import pytest + import mock -from itertools import zip_longest -from google.cloud.bigtable_v2 import ReadRowsResponse +import pytest + +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data.exceptions import InvalidChunk from google.cloud.bigtable.data.row import Row +from google.cloud.bigtable_v2 import ReadRowsResponse + from ...v2_client.test_row_merger import ReadRowsTest, TestFile -from google.cloud.bigtable.data._cross_sync import CrossSync class TestReadRowsAcceptance: diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py index df6321f7f5e0..aa0f69ee73e3 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_async/test_query_iterator.py @@ -12,22 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import concurrent.futures import gc + +import pytest + from google.cloud.bigtable.data import exceptions +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data.execute_query.metadata import ( _pb_metadata_to_metadata_types, ) -import pytest -import concurrent.futures -from ..sql_helpers import ( - chunked_responses, - int_val, - column, - metadata, - int64_type, -) -from google.cloud.bigtable.data._cross_sync import CrossSync +from ..sql_helpers import chunked_responses, column, int64_type, int_val, metadata # try/except added for compatibility with python < 3.8 try: diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py index 3915693cd2cc..7c3efce32126 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/_sync_autogen/test_query_iterator.py @@ -15,15 +15,18 @@ # This file is automatically generated by CrossSync. Do not edit manually. 
+import concurrent.futures import gc + +import pytest + from google.cloud.bigtable.data import exceptions +from google.cloud.bigtable.data._cross_sync import CrossSync from google.cloud.bigtable.data.execute_query.metadata import ( _pb_metadata_to_metadata_types, ) -import pytest -import concurrent.futures -from ..sql_helpers import chunked_responses, int_val, column, metadata, int64_type -from google.cloud.bigtable.data._cross_sync import CrossSync + +from ..sql_helpers import chunked_responses, column, int64_type, int_val, metadata try: from unittest import mock diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py index b4481db4bc80..07e17af880d3 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/resources/singer_pb2.py @@ -2,10 +2,10 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: singer.proto """Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder # @@protoc_insertion_point(imports) diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py index 119bb2d50862..e260e61657e7 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/sql_helpers.py @@ -16,20 +16,20 @@ from typing import List from google.protobuf import timestamp_pb2 +import google_crc32c # type: ignore from google.cloud.bigtable_v2.types.bigtable import ( ExecuteQueryResponse, PrepareQueryResponse, ) from google.cloud.bigtable_v2.types.data import ( - Value, + ColumnMetadata, ProtoRows, ProtoRowsBatch, ResultSetMetadata, - ColumnMetadata, + Value, ) from google.cloud.bigtable_v2.types.types import Type -import google_crc32c # type: ignore def checksum(data: bytearray) -> int: diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_byte_cursor.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_byte_cursor.py index fc764c86cb6f..5792b444e92a 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_byte_cursor.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_byte_cursor.py @@ -15,11 +15,7 @@ from google.cloud.bigtable.data.execute_query._byte_cursor import _ByteCursor -from .sql_helpers import ( - batch_response, - checksum, - token_only_response, -) +from .sql_helpers import batch_response, checksum, token_only_response def pass_values_to_byte_cursor(byte_cursor, iterable): diff --git 
a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py index 2a391882dc0e..e5db338e3bd3 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_checksum.py @@ -11,12 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import pytest - import sys from unittest import mock import warnings +import pytest + with warnings.catch_warnings(record=True) as suppressed_warning: warnings.warn("Supressed warning", RuntimeWarning) diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py index a49d25788deb..fbaf4b0a94d9 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_execute_query_parameters_parsing.py @@ -15,6 +15,7 @@ import datetime from google.api_core.datetime_helpers import DatetimeWithNanoseconds +from google.protobuf import timestamp_pb2 from google.type import date_pb2 import pytest @@ -24,7 +25,7 @@ ) from google.cloud.bigtable.data.execute_query.metadata import SqlType from google.cloud.bigtable.data.execute_query.values import Struct -from google.protobuf import timestamp_pb2 + from .resources import singer_pb2 timestamp = int( diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py index 4d1068c7490f..f96482505813 100644 --- 
a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_parsing_utils.py @@ -12,23 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime + +from google.api_core.datetime_helpers import DatetimeWithNanoseconds +from google.type import date_pb2 import pytest -from google.cloud.bigtable.data.execute_query.values import Struct -from google.cloud.bigtable_v2 import Type as PBType, Value as PBValue + from google.cloud.bigtable.data.execute_query._query_result_parsing_utils import ( _parse_pb_value_to_python_value, ) from google.cloud.bigtable.data.execute_query.metadata import ( - _pb_type_to_metadata_type, SqlType, + _pb_type_to_metadata_type, ) +from google.cloud.bigtable.data.execute_query.values import Struct +from google.cloud.bigtable_v2 import Type as PBType +from google.cloud.bigtable_v2 import Value as PBValue +from tests.unit.data.execute_query.sql_helpers import enum_type, int64_type, proto_type -from google.type import date_pb2 -from google.api_core.datetime_helpers import DatetimeWithNanoseconds - -import datetime - -from tests.unit.data.execute_query.sql_helpers import int64_type, proto_type, enum_type from .resources import singer_pb2 TYPE_BYTES = {"bytes_type": {}} diff --git a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py index 8667643a123c..c2ccc679a19d 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py +++ b/packages/google-cloud-bigtable/tests/unit/data/execute_query/test_query_result_row_reader.py @@ -12,19 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest from unittest import mock -from google.cloud.bigtable_v2.types.data import Value as PBValue -from google.cloud.bigtable.data.execute_query._reader import _QueryResultRowReader +import pytest + +import google.cloud.bigtable.data.execute_query._reader +from google.cloud.bigtable.data.execute_query._reader import _QueryResultRowReader from google.cloud.bigtable.data.execute_query.metadata import ( Metadata, SqlType, _pb_metadata_to_metadata_types, ) - -import google.cloud.bigtable.data.execute_query._reader +from google.cloud.bigtable_v2.types.data import Value as PBValue from tests.unit.data.execute_query.sql_helpers import ( + bytes_val, chunked_responses, column, int64_type, @@ -32,8 +33,8 @@ metadata, proto_rows_bytes, str_val, - bytes_val, ) + from .resources import singer_pb2 diff --git a/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py b/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py index c8540024d60c..98197cb251f5 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test__helpers.py @@ -12,14 +12,14 @@ # limitations under the License. # -import pytest -import grpc from google.api_core import exceptions as core_exceptions +import grpc +import mock +import pytest + import google.cloud.bigtable.data._helpers as _helpers from google.cloud.bigtable.data._helpers import TABLE_DEFAULT -import mock - class TestAttemptTimeoutGenerator: @pytest.mark.parametrize( diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py b/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py index bc921717e596..3095065817f8 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_exceptions.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import sys import unittest + import pytest -import sys import google.cloud.bigtable.data.exceptions as bigtable_exceptions diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py b/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py index 5d1ad70f8e97..7668f51d0cad 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_helpers.py @@ -14,6 +14,7 @@ # import pytest + from google.cloud.bigtable.helpers import batched diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py b/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py index ba3b0468bbb8..e88ca4a9bb71 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_read_rows_query.py @@ -206,8 +206,8 @@ def test_ctor_defaults(self): assert query.limit is None def test_ctor_explicit(self): - from google.cloud.bigtable.data.row_filters import RowFilterChain from google.cloud.bigtable.data.read_rows_query import RowRange + from google.cloud.bigtable.data.row_filters import RowFilterChain filter_ = RowFilterChain() query = self._make_one( @@ -559,8 +559,7 @@ def test_shard_limit_exception(self): ], ) def test___eq__(self, first_args, second_args, expected): - from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery - from google.cloud.bigtable.data.read_rows_query import RowRange + from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery, RowRange # replace row_range placeholders with a RowRange object if len(first_args) > 1: diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_row.py b/packages/google-cloud-bigtable/tests/unit/data/test_row.py index 10b5bdb2316f..95a72399acff 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_row.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_row.py @@ -12,9 +12,8 @@ # See the License for 
the specific language governing permissions and # limitations under the License. -import unittest - import time +import unittest TEST_VALUE = b"1234" TEST_ROW_KEY = b"row" @@ -59,10 +58,10 @@ def test__from_pb(self): """ Construct from protobuf. """ - from google.cloud.bigtable_v2.types import Row as RowPB - from google.cloud.bigtable_v2.types import Family as FamilyPB - from google.cloud.bigtable_v2.types import Column as ColumnPB from google.cloud.bigtable_v2.types import Cell as CellPB + from google.cloud.bigtable_v2.types import Column as ColumnPB + from google.cloud.bigtable_v2.types import Family as FamilyPB + from google.cloud.bigtable_v2.types import Row as RowPB row_key = b"row_key" cells = [ diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py b/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py index e90b6f270a61..f02758769a7e 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_row_filters.py @@ -17,10 +17,12 @@ def test_abstract_class_constructors(): - from google.cloud.bigtable.data.row_filters import RowFilter - from google.cloud.bigtable.data.row_filters import _BoolFilter - from google.cloud.bigtable.data.row_filters import _FilterCombination - from google.cloud.bigtable.data.row_filters import _CellCountFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilter, + _BoolFilter, + _CellCountFilter, + _FilterCombination, + ) with pytest.raises(TypeError): RowFilter() @@ -392,7 +394,9 @@ def test_timestamp_range___ne__same_value(): def _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=None): import datetime + from google.cloud._helpers import _EPOCH + from google.cloud.bigtable.data.row_filters import TimestampRange if start is not None: @@ -421,9 +425,10 @@ def test_timestamp_range_to_pb(): def test_timestamp_range_to_dict(): + import datetime + from google.cloud.bigtable.data.row_filters import 
TimestampRange from google.cloud.bigtable_v2.types import data as data_v2_pb2 - import datetime row_filter = TimestampRange( start=datetime.datetime(2019, 1, 1), end=datetime.datetime(2019, 1, 2) @@ -448,9 +453,10 @@ def test_timestamp_range_to_pb_start_only(): def test_timestamp_range_to_dict_start_only(): + import datetime + from google.cloud.bigtable.data.row_filters import TimestampRange from google.cloud.bigtable_v2.types import data as data_v2_pb2 - import datetime row_filter = TimestampRange(start=datetime.datetime(2019, 1, 1)) expected_dict = {"start_timestamp_micros": 1546300800000000} @@ -470,9 +476,10 @@ def test_timestamp_range_to_pb_end_only(): def test_timestamp_range_to_dict_end_only(): + import datetime + from google.cloud.bigtable.data.row_filters import TimestampRange from google.cloud.bigtable_v2.types import data as data_v2_pb2 - import datetime row_filter = TimestampRange(end=datetime.datetime(2019, 1, 2)) expected_dict = {"end_timestamp_micros": 1546387200000000} @@ -530,9 +537,10 @@ def test_timestamp_range_filter_to_pb(): def test_timestamp_range_filter_to_dict(): + import datetime + from google.cloud.bigtable.data.row_filters import TimestampRangeFilter from google.cloud.bigtable_v2.types import data as data_v2_pb2 - import datetime row_filter = TimestampRangeFilter( start=datetime.datetime(2019, 1, 1), end=datetime.datetime(2019, 1, 2) @@ -560,9 +568,10 @@ def test_timestamp_range_filter_empty_to_dict(): def test_timestamp_range_filter___repr__(): - from google.cloud.bigtable.data.row_filters import TimestampRangeFilter import datetime + from google.cloud.bigtable.data.row_filters import TimestampRangeFilter + start = datetime.datetime(2019, 1, 1) end = datetime.datetime(2019, 1, 2) row_filter = TimestampRangeFilter(start, end) @@ -944,9 +953,10 @@ def test_value_range_filter_constructor_explicit(): def test_value_range_filter_constructor_w_int_values(): - from google.cloud.bigtable.data.row_filters import ValueRangeFilter import struct + 
from google.cloud.bigtable.data.row_filters import ValueRangeFilter + start_value = 1 end_value = 10 @@ -1411,9 +1421,11 @@ def test_filter_combination___str__(): def test_row_filter_chain_to_pb(): - from google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1._to_pb() @@ -1431,9 +1443,11 @@ def test_row_filter_chain_to_pb(): def test_row_filter_chain_to_dict(): - from google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1452,10 +1466,12 @@ def test_row_filter_chain_to_dict(): def test_row_filter_chain_to_pb_nested(): - from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1476,10 +1492,12 @@ def test_row_filter_chain_to_pb_nested(): def test_row_filter_chain_to_dict_nested(): - from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter - from 
google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1502,9 +1520,11 @@ def test_row_filter_chain_to_dict_nested(): def test_row_filter_chain___repr__(): - from google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1516,9 +1536,11 @@ def test_row_filter_chain___repr__(): def test_row_filter_chain___str__(): - from google.cloud.bigtable.data.row_filters import RowFilterChain - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1533,9 +1555,11 @@ def test_row_filter_chain___str__(): def test_row_filter_union_to_pb(): - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = 
StripValueTransformerFilter(True) row_filter1_pb = row_filter1._to_pb() @@ -1553,9 +1577,11 @@ def test_row_filter_union_to_pb(): def test_row_filter_union_to_dict(): - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1574,10 +1600,12 @@ def test_row_filter_union_to_dict(): def test_row_filter_union_to_pb_nested(): - from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1598,10 +1626,12 @@ def test_row_filter_union_to_pb_nested(): def test_row_filter_union_to_dict_nested(): - from google.cloud.bigtable.data.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowLimitFilter, + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1624,9 +1654,11 @@ def test_row_filter_union_to_dict_nested(): def 
test_row_filter_union___repr__(): - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1638,9 +1670,11 @@ def test_row_filter_union___repr__(): def test_row_filter_union___str__(): - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1713,10 +1747,12 @@ def test_conditional_row_filter___ne__(): def test_conditional_row_filter_to_pb(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import CellsRowOffsetFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowOffsetFilter, + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1._to_pb() @@ -1743,10 +1779,12 @@ def test_conditional_row_filter_to_pb(): def test_conditional_row_filter_to_dict(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import CellsRowOffsetFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from 
google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + CellsRowOffsetFilter, + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1776,9 +1814,11 @@ def test_conditional_row_filter_to_dict(): def test_conditional_row_filter_to_pb_true_only(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1._to_pb() @@ -1798,9 +1838,11 @@ def test_conditional_row_filter_to_pb_true_only(): def test_conditional_row_filter_to_dict_true_only(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1824,9 +1866,11 @@ def test_conditional_row_filter_to_dict_true_only(): def test_conditional_row_filter_to_pb_false_only(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = 
StripValueTransformerFilter(True) row_filter1_pb = row_filter1._to_pb() @@ -1846,9 +1890,11 @@ def test_conditional_row_filter_to_pb_false_only(): def test_conditional_row_filter_to_dict_false_only(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) from google.cloud.bigtable_v2.types import data as data_v2_pb2 row_filter1 = StripValueTransformerFilter(True) @@ -1872,9 +1918,11 @@ def test_conditional_row_filter_to_dict_false_only(): def test_conditional_row_filter___repr__(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1893,10 +1941,12 @@ def test_conditional_row_filter___repr__(): def test_conditional_row_filter___str__(): - from google.cloud.bigtable.data.row_filters import ConditionalRowFilter - from google.cloud.bigtable.data.row_filters import RowSampleFilter - from google.cloud.bigtable.data.row_filters import RowFilterUnion - from google.cloud.bigtable.data.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.data.row_filters import ( + ConditionalRowFilter, + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1981,11 +2031,11 @@ def _ValueRangePB(*args, **kw): def _get_regex_filters(): from google.cloud.bigtable.data.row_filters 
import ( - RowKeyRegexFilter, - FamilyNameRegexFilter, ColumnQualifierRegexFilter, - ValueRegexFilter, + FamilyNameRegexFilter, LiteralValueFilter, + RowKeyRegexFilter, + ValueRegexFilter, ) return [ @@ -1999,9 +2049,9 @@ def _get_regex_filters(): def _get_bool_filters(): from google.cloud.bigtable.data.row_filters import ( - SinkFilter, - PassAllFilter, BlockAllFilter, + PassAllFilter, + SinkFilter, StripValueTransformerFilter, ) @@ -2015,9 +2065,9 @@ def _get_bool_filters(): def _get_cell_count_filters(): from google.cloud.bigtable.data.row_filters import ( + CellsColumnLimitFilter, CellsRowLimitFilter, CellsRowOffsetFilter, - CellsColumnLimitFilter, ) return [ @@ -2028,10 +2078,7 @@ def _get_cell_count_filters(): def _get_filter_combination_filters(): - from google.cloud.bigtable.data.row_filters import ( - RowFilterChain, - RowFilterUnion, - ) + from google.cloud.bigtable.data.row_filters import RowFilterChain, RowFilterUnion return [ RowFilterChain, diff --git a/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py b/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py index e6bce9cf6266..5aacce12c5d4 100644 --- a/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py +++ b/packages/google-cloud-bigtable/tests/unit/data/test_sync_up_to_date.py @@ -11,13 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import ast +from difflib import unified_diff +import hashlib import os +import re import sys -import hashlib + import pytest -import ast -import re -from difflib import unified_diff if sys.version_info < (3, 9): pytest.skip("ast.unparse is only available in 3.9+", allow_module_level=True) @@ -28,7 +29,7 @@ cross_sync_path = os.path.join(repo_root, ".cross_sync") sys.path.append(cross_sync_path) -from generate import convert_files_in_dir, CrossSyncOutputFile # noqa: E402 +from generate import CrossSyncOutputFile, convert_files_in_dir # noqa: E402 sync_files = list(convert_files_in_dir(repo_root)) diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py index b0ba35f0c3de..014859e21e5e 100644 --- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py +++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_instance_admin.py @@ -22,20 +22,19 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async @@ -44,31 +43,22 @@ 
except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminAsyncClient, -) -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( - BigtableInstanceAdminClient, -) -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import pagers -from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import transports -from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin -from google.cloud.bigtable_admin_v2.types import common -from google.cloud.bigtable_admin_v2.types import instance -from google.cloud.bigtable_admin_v2.types import instance as gba_instance from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -77,8 +67,16 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import expr_pb2 # type: ignore -import google.auth +from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( + BigtableInstanceAdminAsyncClient, + 
BigtableInstanceAdminClient, + pagers, + transports, +) +from google.cloud.bigtable_admin_v2.types import bigtable_instance_admin, common +from google.cloud.bigtable_admin_v2.types import instance +from google.cloud.bigtable_admin_v2.types import instance as gba_instance CRED_INFO_JSON = { "credential_source": "/path/to/file", diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py index bff2206931b6..6c72041f9507 100644 --- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py +++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_admin_v2/test_bigtable_table_admin.py @@ -22,20 +22,19 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async @@ -44,31 +43,22 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions 
as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BaseBigtableTableAdminAsyncClient, -) -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BaseBigtableTableAdminClient, -) -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import pagers -from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import transports -from google.cloud.bigtable_admin_v2.types import bigtable_table_admin -from google.cloud.bigtable_admin_v2.types import table -from google.cloud.bigtable_admin_v2.types import table as gba_table -from google.cloud.bigtable_admin_v2.types import types from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -80,8 +70,17 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore -import google.auth +from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BaseBigtableTableAdminAsyncClient, + BaseBigtableTableAdminClient, + pagers, + transports, +) +from google.cloud.bigtable_admin_v2.types import bigtable_table_admin +from google.cloud.bigtable_admin_v2.types import table +from google.cloud.bigtable_admin_v2.types import table as gba_table +from google.cloud.bigtable_admin_v2.types import types CRED_INFO_JSON = { 
"credential_source": "/path/to/file", diff --git a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py index ea7f0955d430..0b3a0b3b5f0b 100644 --- a/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py +++ b/packages/google-cloud-bigtable/tests/unit/gapic/bigtable_v2/test_bigtable.py @@ -22,20 +22,19 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async @@ -44,28 +43,24 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigtable_v2.services.bigtable 
import BigtableAsyncClient -from google.cloud.bigtable_v2.services.bigtable import BigtableClient -from google.cloud.bigtable_v2.services.bigtable import transports -from google.cloud.bigtable_v2.types import bigtable -from google.cloud.bigtable_v2.types import data -from google.cloud.bigtable_v2.types import request_stats -from google.cloud.bigtable_v2.types import types from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import date_pb2 # type: ignore -import google.auth +from google.cloud.bigtable_v2.services.bigtable import ( + BigtableAsyncClient, + BigtableClient, + transports, +) +from google.cloud.bigtable_v2.types import bigtable, data, request_stats, types CRED_INFO_JSON = { "credential_source": "/path/to/file", diff --git a/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py b/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py index fa9316369508..65f0b5f48fc6 100644 --- a/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py +++ b/packages/google-cloud-bigtable/tests/unit/test_sql_routing_parameters.py @@ -20,9 +20,9 @@ from unittest.mock import AsyncMock # type: ignore # noqa: F401 except ImportError: # pragma: NO COVER import mock -import pytest from grpc.experimental import aio +import pytest try: from google.auth.aio import credentials as ga_credentials_async @@ -31,9 +31,9 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials + from google.cloud.bigtable_v2.services.bigtable.async_client import BigtableAsyncClient from google.cloud.bigtable_v2.services.bigtable.client import BigtableClient from google.cloud.bigtable_v2.types import bigtable diff --git 
a/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py index 660ee78998b0..2a99621eaf91 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_app_profile.py @@ -165,9 +165,9 @@ def test_app_profile___ne__(): def test_app_profile_from_pb_success_w_routing_any(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import AppProfile from google.cloud.bigtable.enums import RoutingPolicyType + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -194,9 +194,9 @@ def test_app_profile_from_pb_success_w_routing_any(): def test_app_profile_from_pb_success_w_routing_any_multi_cluster_ids(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import AppProfile from google.cloud.bigtable.enums import RoutingPolicyType + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -225,9 +225,9 @@ def test_app_profile_from_pb_success_w_routing_any_multi_cluster_ids(): def test_app_profile_from_pb_success_w_routing_single(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import AppProfile from google.cloud.bigtable.enums import RoutingPolicyType + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -258,8 +258,8 @@ def test_app_profile_from_pb_success_w_routing_single(): def test_app_profile_from_pb_w_bad_app_profile_name(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import 
AppProfile + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 bad_app_profile_name = "BAD_NAME" @@ -270,8 +270,8 @@ def test_app_profile_from_pb_w_bad_app_profile_name(): def test_app_profile_from_pb_w_instance_id_mistmatch(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 ALT_INSTANCE_ID = "ALT_INSTANCE_ID" client = _Client(PROJECT) @@ -285,8 +285,8 @@ def test_app_profile_from_pb_w_instance_id_mistmatch(): def test_app_profile_from_pb_w_project_mistmatch(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 ALT_PROJECT = "ALT_PROJECT" client = _Client(project=ALT_PROJECT) @@ -300,11 +300,11 @@ def test_app_profile_from_pb_w_project_mistmatch(): def test_app_profile_reload_w_routing_any(): + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import RoutingPolicyType api = mock.create_autospec(BigtableInstanceAdminClient) credentials = _make_credentials() @@ -362,11 +362,12 @@ def test_app_profile_reload_w_routing_any(): def test_app_profile_exists(): + from google.api_core import exceptions + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.api_core import exceptions instance_api = mock.create_autospec(BigtableInstanceAdminClient) credentials = _make_credentials() @@ -397,11 +398,11 @@ def test_app_profile_exists(): def test_app_profile_create_w_routing_any(): + from 
google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.cloud.bigtable.app_profile import AppProfile - from google.cloud.bigtable.enums import RoutingPolicyType credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -458,11 +459,11 @@ def test_app_profile_create_w_routing_any(): def test_app_profile_create_w_routing_single(): + from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.cloud.bigtable.app_profile import AppProfile - from google.cloud.bigtable.enums import RoutingPolicyType credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -529,15 +530,16 @@ def test_app_profile_create_w_wrong_routing_policy(): def test_app_profile_update_w_routing_any(): from google.longrunning import operations_pb2 + from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -604,15 +606,16 @@ def test_app_profile_update_w_routing_any(): def test_app_profile_update_w_routing_any_multi_cluster_ids(): from google.longrunning import operations_pb2 + from 
google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -680,15 +683,16 @@ def test_app_profile_update_w_routing_any_multi_cluster_ids(): def test_app_profile_update_w_routing_single(): from google.longrunning import operations_pb2 + from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) + from google.cloud.bigtable.enums import RoutingPolicyType from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -752,6 +756,7 @@ def test_app_profile_update_w_wrong_routing_policy(): def test_app_profile_delete(): from google.protobuf import empty_pb2 + from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py index a5d205af652e..f8f96dcff22b 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_backup.py @@ 
-100,8 +100,8 @@ def test_backup_constructor_explicit(): def test_backup_from_pb_w_project_mismatch(): - from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable.backup import Backup + from google.cloud.bigtable_admin_v2.types import table alt_project_id = "alt-project-id" client = _Client(project=alt_project_id) @@ -113,8 +113,8 @@ def test_backup_from_pb_w_project_mismatch(): def test_backup_from_pb_w_instance_mismatch(): - from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable.backup import Backup + from google.cloud.bigtable_admin_v2.types import table alt_instance = "/projects/%s/instances/alt-instance" % PROJECT_ID client = _Client() @@ -126,8 +126,8 @@ def test_backup_from_pb_w_instance_mismatch(): def test_backup_from_pb_w_bad_name(): - from google.cloud.bigtable_admin_v2.types import table from google.cloud.bigtable.backup import Backup + from google.cloud.bigtable_admin_v2.types import table client = _Client() instance = _Instance(INSTANCE_NAME, client) @@ -138,12 +138,13 @@ def test_backup_from_pb_w_bad_name(): def test_backup_from_pb_success(): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.rpc.code_pb2 import Code + + from google.cloud.bigtable.backup import Backup from google.cloud.bigtable.encryption_info import EncryptionInfo from google.cloud.bigtable.error import Status from google.cloud.bigtable_admin_v2.types import table - from google.cloud.bigtable.backup import Backup - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.rpc.code_pb2 import Code client = _Client() instance = _Instance(INSTANCE_NAME, client) @@ -345,9 +346,9 @@ def test_backup___ne__(): def test_backup_create_w_grpc_error(): - from google.api_core.exceptions import GoogleAPICallError - from google.api_core.exceptions import Unknown + from google.api_core.exceptions import GoogleAPICallError, Unknown from google.cloud._helpers import _datetime_to_pb_timestamp + from 
google.cloud.bigtable_admin_v2.types import table client = _Client() @@ -377,9 +378,10 @@ def test_backup_create_w_grpc_error(): def test_backup_create_w_already_exists(): from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table from google.cloud.exceptions import Conflict + from google.cloud.bigtable_admin_v2.types import table + client = _Client() api = client.table_admin_client = _make_table_admin_client() api.create_backup.side_effect = Conflict("testing") @@ -407,9 +409,10 @@ def test_backup_create_w_already_exists(): def test_backup_create_w_instance_not_found(): from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table from google.cloud.exceptions import NotFound + from google.cloud.bigtable_admin_v2.types import table + client = _Client() api = client.table_admin_client = _make_table_admin_client() api.create_backup.side_effect = NotFound("testing") @@ -471,8 +474,9 @@ def test_backup_create_w_expire_time_not_set(): def test_backup_create_success(): from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table + from google.cloud.bigtable import Client + from google.cloud.bigtable_admin_v2.types import table op_future = object() credentials = _make_credentials() @@ -503,9 +507,10 @@ def test_backup_create_success(): def test_backup_get(): - from google.cloud.bigtable_admin_v2.types import table from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + timestamp = _datetime_to_pb_timestamp(_make_timestamp()) state = table.Backup.State.READY @@ -529,9 +534,10 @@ def test_backup_get(): def test_backup_reload(): - from google.cloud.bigtable_admin_v2.types import table from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import table + timestamp = 
_datetime_to_pb_timestamp(_make_timestamp()) state = table.Backup.State.READY @@ -655,9 +661,10 @@ def test_backup_delete_success(): def test_backup_update_expire_time_w_grpc_error(): from google.api_core.exceptions import Unknown from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import table + client = _Client() api = client.table_admin_client = _make_table_admin_client() api.update_backup.side_effect = Unknown("testing") @@ -681,9 +688,10 @@ def test_backup_update_expire_time_w_grpc_error(): def test_backup_update_expire_time_w_not_found(): from google.api_core.exceptions import NotFound from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import table + client = _Client() api = client.table_admin_client = _make_table_admin_client() api.update_backup.side_effect = NotFound("testing") @@ -706,9 +714,10 @@ def test_backup_update_expire_time_w_not_found(): def test_backup_update_expire_time_success(): from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable_admin_v2.types import table from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import table + client = _Client() api = client.table_admin_client = _make_table_admin_client() api.update_backup.return_type = table.Backup(name=BACKUP_NAME) @@ -729,8 +738,7 @@ def test_backup_update_expire_time_success(): def test_backup_restore_w_grpc_error(): - from google.api_core.exceptions import GoogleAPICallError - from google.api_core.exceptions import Unknown + from google.api_core.exceptions import GoogleAPICallError, Unknown client = _Client() api = client.table_admin_client = _make_table_admin_client() @@ -805,12 +813,13 @@ def 
test_backup_restore_to_another_instance(): def test_backup_get_iam_policy(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.client import Client + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( BaseBigtableTableAdminClient, ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE credentials = _make_credentials() client = Client(project=PROJECT_ID, credentials=credentials, admin=True) @@ -841,13 +850,13 @@ def test_backup_get_iam_policy(): def test_backup_set_iam_policy(): + from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.client import Client + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( BaseBigtableTableAdminClient, ) - from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import Policy - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE credentials = _make_credentials() client = Client(project=PROJECT_ID, credentials=credentials, admin=True) @@ -886,11 +895,12 @@ def test_backup_set_iam_policy(): def test_backup_test_iam_permissions(): + from google.iam.v1 import iam_policy_pb2 + from google.cloud.bigtable.client import Client from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( BaseBigtableTableAdminClient, ) - from google.iam.v1 import iam_policy_pb2 credentials = _make_credentials() client = Client(project=PROJECT_ID, credentials=credentials, admin=True) diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py index fcf6069725fc..4090c3c81cea 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_batcher.py @@ -13,17 +13,17 @@ # limitations under the License. 
-import mock import time +import mock import pytest -from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.batcher import ( - _FlowControl, MutationsBatcher, MutationsBatchError, + _FlowControl, ) +from google.cloud.bigtable.row import DirectRow TABLE_ID = "table-id" TABLE_NAME = "/tables/" + TABLE_ID diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py index a4fc0f9cb40e..fa2779990071 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_client.py @@ -109,6 +109,7 @@ def _make_client(*args, **kwargs): @mock.patch("os.environ", {}) def test_client_constructor_defaults(): from google.api_core import client_info + from google.cloud.bigtable import __version__ from google.cloud.bigtable.client import DATA_SCOPE @@ -131,8 +132,8 @@ def test_client_constructor_defaults(): def test_client_constructor_explicit(): import warnings - from google.cloud.bigtable.client import ADMIN_SCOPE - from google.cloud.bigtable.client import DATA_SCOPE + + from google.cloud.bigtable.client import ADMIN_SCOPE, DATA_SCOPE credentials = _make_credentials() client_info = mock.Mock() @@ -171,10 +172,13 @@ def test_client_constructor_w_both_admin_and_read_only(): def test_client_constructor_w_emulator_host(): from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS import grpc + from google.cloud.bigtable.client import ( + _DEFAULT_BIGTABLE_EMULATOR_CLIENT, + _GRPC_CHANNEL_OPTIONS, + ) + emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): channel = grpc.insecure_channel("no-host") @@ -197,9 +201,10 @@ def test_client_constructor_w_emulator_host(): def test_client_constructor_w_emulator_host_w_project(): 
from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS import grpc + from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS + emulator_host = "localhost:8081" with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): channel = grpc.insecure_channel("no-host") @@ -219,10 +224,13 @@ def test_client_constructor_w_emulator_host_w_project(): def test_client_constructor_w_emulator_host_w_credentials(): from google.cloud.environment_vars import BIGTABLE_EMULATOR - from google.cloud.bigtable.client import _DEFAULT_BIGTABLE_EMULATOR_CLIENT - from google.cloud.bigtable.client import _GRPC_CHANNEL_OPTIONS import grpc + from google.cloud.bigtable.client import ( + _DEFAULT_BIGTABLE_EMULATOR_CLIENT, + _GRPC_CHANNEL_OPTIONS, + ) + emulator_host = "localhost:8081" credentials = _make_credentials() with mock.patch("os.environ", {BIGTABLE_EMULATOR: emulator_host}): @@ -249,8 +257,7 @@ def test_client__get_scopes_default(): def test_client__get_scopes_w_admin(): - from google.cloud.bigtable.client import ADMIN_SCOPE - from google.cloud.bigtable.client import DATA_SCOPE + from google.cloud.bigtable.client import ADMIN_SCOPE, DATA_SCOPE client = _make_client(project=PROJECT, credentials=_make_credentials(), admin=True) expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) @@ -597,8 +604,8 @@ def test_client_instance_factory_defaults(): def test_client_instance_factory_non_defaults(): - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable import enums + from google.cloud.bigtable.instance import Instance instance_type = enums.Instance.Type.DEVELOPMENT labels = {"foo": "bar"} @@ -621,14 +628,14 @@ def test_client_instance_factory_non_defaults(): def test_client_list_instances(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable_admin_v2.types import ( - bigtable_instance_admin as messages_v2_pb2, - ) + from 
google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) - from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import ( + bigtable_instance_admin as messages_v2_pb2, + ) + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 FAILED_LOCATION = "FAILED" INSTANCE_ID1 = "instance-id1" @@ -673,6 +680,7 @@ def test_client_list_instances(): def test_client_list_clusters(): + from google.cloud.bigtable.instance import Cluster from google.cloud.bigtable_admin_v2.services.bigtable_instance_admin import ( BigtableInstanceAdminClient, ) @@ -680,7 +688,6 @@ def test_client_list_clusters(): bigtable_instance_admin as messages_v2_pb2, ) from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.instance import Cluster instance_api = mock.create_autospec(BigtableInstanceAdminClient) diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py index a21104549bc6..b25ca4ab7adc 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_cluster.py @@ -72,8 +72,7 @@ def test_cluster_constructor_defaults(): def test_cluster_constructor_explicit(): - from google.cloud.bigtable.enums import StorageType - from google.cloud.bigtable.enums import Cluster + from google.cloud.bigtable.enums import Cluster, StorageType STATE = Cluster.State.READY STORAGE_TYPE_SSD = StorageType.SSD @@ -125,9 +124,9 @@ def test_cluster_kms_key_name_setter(): def test_cluster_from_pb_success(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.cluster import Cluster from google.cloud.bigtable import enums + from google.cloud.bigtable.cluster import Cluster + from 
google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -161,8 +160,8 @@ def test_cluster_from_pb_success(): def test_cluster_from_pb_w_bad_cluster_name(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.cluster import Cluster + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 bad_cluster_name = "BAD_NAME" @@ -173,8 +172,8 @@ def test_cluster_from_pb_w_bad_cluster_name(): def test_cluster_from_pb_w_instance_id_mistmatch(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.cluster import Cluster + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 ALT_INSTANCE_ID = "ALT_INSTANCE_ID" client = _Client(PROJECT) @@ -188,8 +187,8 @@ def test_cluster_from_pb_w_instance_id_mistmatch(): def test_cluster_from_pb_w_project_mistmatch(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.cluster import Cluster + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 ALT_PROJECT = "ALT_PROJECT" client = _Client(project=ALT_PROJECT) @@ -203,9 +202,9 @@ def test_cluster_from_pb_w_project_mistmatch(): def test_cluster_from_pb_w_autoscaling(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.cluster import Cluster from google.cloud.bigtable import enums + from google.cloud.bigtable.cluster import Cluster + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 client = _Client(PROJECT) instance = _Instance(INSTANCE_ID, client) @@ -291,9 +290,8 @@ def _make_instance_admin_client(): def test_cluster_reload(): + from google.cloud.bigtable.enums import Cluster, StorageType from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.enums import StorageType - from 
google.cloud.bigtable.enums import Cluster credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -348,8 +346,8 @@ def test_cluster_reload(): def test_cluster_exists_hit(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -371,9 +369,10 @@ def test_cluster_exists_hit(): def test_cluster_exists_miss(): - from google.cloud.bigtable.instance import Instance from google.api_core import exceptions + from google.cloud.bigtable.instance import Instance + credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) instance = Instance(INSTANCE_ID, client) @@ -390,9 +389,10 @@ def test_cluster_exists_miss(): def test_cluster_exists_w_error(): - from google.cloud.bigtable.instance import Instance from google.api_core import exceptions + from google.cloud.bigtable.instance import Instance + credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) instance = Instance(INSTANCE_ID, client) @@ -410,15 +410,17 @@ def test_cluster_exists_w_error(): def test_cluster_create(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any + + from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 - from google.cloud.bigtable.enums 
import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -465,15 +467,17 @@ def test_cluster_create(): def test_cluster_create_w_cmek(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any + + from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -525,15 +529,17 @@ def test_cluster_create_w_cmek(): def test_cluster_create_w_autoscaling(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any + + from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import instance as instance_v2_pb2 - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -593,14 +599,16 @@ def test_cluster_create_w_autoscaling(): def test_cluster_update(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 
import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + + from google.cloud.bigtable.enums import StorageType from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -660,14 +668,16 @@ def test_cluster_update(): def test_cluster_update_w_autoscaling(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + + from google.cloud.bigtable.enums import StorageType from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -719,14 +729,16 @@ def test_cluster_update_w_autoscaling(): def test_cluster_update_w_partial_autoscaling_config(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + + from google.cloud.bigtable.enums import StorageType from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -803,14 +815,16 @@ def test_cluster_update_w_partial_autoscaling_config(): def test_cluster_update_w_both_manual_and_autoscaling(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf 
import field_mask_pb2 from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + + from google.cloud.bigtable.enums import StorageType from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -863,15 +877,17 @@ def test_cluster_update_w_both_manual_and_autoscaling(): def test_cluster_disable_autoscaling(): import datetime + + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf import field_mask_pb2 from google.protobuf.any_pb2 import Any + + from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable.enums import StorageType NOW = datetime.datetime.now(datetime.timezone.utc) NOW_PB = _datetime_to_pb_timestamp(NOW) @@ -926,8 +942,8 @@ def test_cluster_disable_autoscaling(): def test_create_cluster_with_both_manual_and_autoscaling(): - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -953,8 +969,8 @@ def test_create_cluster_with_both_manual_and_autoscaling(): def test_create_cluster_with_partial_autoscaling_config(): - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance credentials = _make_credentials() client = _make_client(project=PROJECT, 
credentials=credentials, admin=True) @@ -993,8 +1009,8 @@ def test_create_cluster_with_partial_autoscaling_config(): def test_create_cluster_with_no_scaling_config(): - from google.cloud.bigtable.instance import Instance from google.cloud.bigtable.enums import StorageType + from google.cloud.bigtable.instance import Instance credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py index 2480e11cba11..0a33785ac97a 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_column_family.py @@ -79,6 +79,7 @@ def test_max_age_gc_rule___ne__same_value(): def test_max_age_gc_rule_to_pb(): import datetime + from google.protobuf import duration_pb2 max_age = datetime.timedelta(seconds=1) @@ -123,9 +124,10 @@ def test_gc_rule_union___ne__same_value(): def test_gc_rule_union_to_pb(): import datetime + from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule max_num_versions = 42 rule1 = MaxVersionsGCRule(max_num_versions) @@ -144,9 +146,10 @@ def test_gc_rule_union_to_pb(): def test_gc_rule_union_to_pb_nested(): import datetime + from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule max_num_versions1 = 42 rule1 = MaxVersionsGCRule(max_num_versions1) @@ -205,9 +208,10 @@ def test_gc_rule_intersection___ne__same_value(): def test_gc_rule_intersection_to_pb(): import datetime + from 
google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule max_num_versions = 42 rule1 = MaxVersionsGCRule(max_num_versions) @@ -226,9 +230,10 @@ def test_gc_rule_intersection_to_pb(): def test_gc_rule_intersection_to_pb_nested(): import datetime + from google.protobuf import duration_pb2 - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import MaxAgeGCRule, MaxVersionsGCRule max_num_versions1 = 42 rule1 = MaxVersionsGCRule(max_num_versions1) @@ -333,13 +338,14 @@ def test_column_family_to_pb_with_rule(): def _create_test_helper(gc_rule=None): + from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BaseBigtableTableAdminClient, + ) from google.cloud.bigtable_admin_v2.types import ( bigtable_table_admin as table_admin_v2_pb2, ) + from ._testing import _FakeStub - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BaseBigtableTableAdminClient, - ) project_id = "project-id" zone = "zone" @@ -404,13 +410,14 @@ def test_column_family_create_with_gc_rule(): def _update_test_helper(gc_rule=None): - from ._testing import _FakeStub - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_admin_v2_pb2, - ) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( BaseBigtableTableAdminClient, ) + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_admin_v2_pb2, + ) + + from ._testing import _FakeStub project_id = "project-id" zone = "zone" @@ -475,13 +482,15 @@ def test_column_family_update_with_gc_rule(): def test_column_family_delete(): from google.protobuf import empty_pb2 + + from 
google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( + BaseBigtableTableAdminClient, + ) from google.cloud.bigtable_admin_v2.types import ( bigtable_table_admin as table_admin_v2_pb2, ) + from ._testing import _FakeStub - from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( - BaseBigtableTableAdminClient, - ) project_id = "project-id" zone = "zone" @@ -537,8 +546,7 @@ def test__gc_rule_from_pb_empty(): def test__gc_rule_from_pb_max_num_versions(): - from google.cloud.bigtable.column_family import _gc_rule_from_pb - from google.cloud.bigtable.column_family import MaxVersionsGCRule + from google.cloud.bigtable.column_family import MaxVersionsGCRule, _gc_rule_from_pb orig_rule = MaxVersionsGCRule(1) gc_rule_pb = orig_rule.to_pb() @@ -549,8 +557,8 @@ def test__gc_rule_from_pb_max_num_versions(): def test__gc_rule_from_pb_max_age(): import datetime - from google.cloud.bigtable.column_family import _gc_rule_from_pb - from google.cloud.bigtable.column_family import MaxAgeGCRule + + from google.cloud.bigtable.column_family import MaxAgeGCRule, _gc_rule_from_pb orig_rule = MaxAgeGCRule(datetime.timedelta(seconds=1)) gc_rule_pb = orig_rule.to_pb() @@ -561,10 +569,13 @@ def test__gc_rule_from_pb_max_age(): def test__gc_rule_from_pb_union(): import datetime - from google.cloud.bigtable.column_family import _gc_rule_from_pb - from google.cloud.bigtable.column_family import GCRuleUnion - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import ( + GCRuleUnion, + MaxAgeGCRule, + MaxVersionsGCRule, + _gc_rule_from_pb, + ) rule1 = MaxVersionsGCRule(1) rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) @@ -577,10 +588,13 @@ def test__gc_rule_from_pb_union(): def test__gc_rule_from_pb_intersection(): import datetime - from google.cloud.bigtable.column_family import _gc_rule_from_pb - from 
google.cloud.bigtable.column_family import GCRuleIntersection - from google.cloud.bigtable.column_family import MaxAgeGCRule - from google.cloud.bigtable.column_family import MaxVersionsGCRule + + from google.cloud.bigtable.column_family import ( + GCRuleIntersection, + MaxAgeGCRule, + MaxVersionsGCRule, + _gc_rule_from_pb, + ) rule1 = MaxVersionsGCRule(1) rule2 = MaxAgeGCRule(datetime.timedelta(seconds=1)) diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py index 8b92a83ed980..32e32cffe038 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_encryption_info.py @@ -16,7 +16,6 @@ from google.cloud.bigtable import enums - EncryptionType = enums.EncryptionInfo.EncryptionType _STATUS_CODE = 123 _STATUS_MESSAGE = "message" diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py index c5ef9c9b8c9b..546049360892 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_instance.py @@ -16,9 +16,9 @@ import mock import pytest -from ._testing import _make_credentials from google.cloud.bigtable.cluster import Cluster +from ._testing import _make_credentials PROJECT = "project" INSTANCE_ID = "instance-id" @@ -103,8 +103,8 @@ def test_instance_constructor_non_default(): def test_instance__update_from_pb_success(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable import enums + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 instance_type = data_v2_pb2.Instance.Type.PRODUCTION state = enums.Instance.State.READY @@ -128,8 +128,8 @@ def test_instance__update_from_pb_success(): def 
test_instance__update_from_pb_success_defaults(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable import enums + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 instance_pb = data_v2_pb2.Instance(display_name=DISPLAY_NAME) @@ -155,9 +155,9 @@ def test_instance__update_from_pb_wo_display_name(): def test_instance_from_pb_success(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable import enums from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -183,8 +183,8 @@ def test_instance_from_pb_success(): def test_instance_from_pb_bad_instance_name(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 instance_name = "INCORRECT_FORMAT" instance_pb = data_v2_pb2.Instance(name=instance_name) @@ -194,8 +194,8 @@ def test_instance_from_pb_bad_instance_name(): def test_instance_from_pb_project_mistmatch(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 ALT_PROJECT = "ALT_PROJECT" credentials = _make_credentials() @@ -268,10 +268,12 @@ def test_instance_create_w_default_storage_type_and_clusters(): def _instance_api_response_for_create(): import datetime + from google.api_core import operation + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( 
bigtable_instance_admin as messages_v2_pb2, ) @@ -304,11 +306,11 @@ def _instance_api_response_for_create(): def test_instance_create(): - from google.cloud.bigtable import enums - from google.cloud.bigtable_admin_v2.types import Instance - from google.cloud.bigtable_admin_v2.types import Cluster import warnings + from google.cloud.bigtable import enums + from google.cloud.bigtable_admin_v2.types import Cluster, Instance + credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) instance = _make_instance( @@ -472,8 +474,8 @@ def test_instance_exists_w_error(): def test_instance_reload(): - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 from google.cloud.bigtable import enums + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 DISPLAY_NAME = "hey-hi-hello" credentials = _make_credentials() @@ -494,10 +496,12 @@ def test_instance_reload(): def _instance_api_response_for_update(): import datetime + from google.api_core import operation + from google.cloud._helpers import _datetime_to_pb_timestamp from google.longrunning import operations_pb2 from google.protobuf.any_pb2 import Any - from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) @@ -527,8 +531,9 @@ def _instance_api_response_for_update(): def test_instance_update(): - from google.cloud.bigtable import enums from google.protobuf import field_mask_pb2 + + from google.cloud.bigtable import enums from google.cloud.bigtable_admin_v2.types import Instance credentials = _make_credentials() @@ -562,6 +567,7 @@ def test_instance_update(): def test_instance_update_empty(): from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2.types import Instance credentials = _make_credentials() @@ -603,6 +609,7 @@ def test_instance_delete(): def test_instance_get_iam_policy(): from google.iam.v1 import 
policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE credentials = _make_credentials() @@ -630,7 +637,8 @@ def test_instance_get_iam_policy(): def test_instance_get_iam_policy_w_requested_policy_version(): - from google.iam.v1 import policy_pb2, options_pb2 + from google.iam.v1 import options_pb2, policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE credentials = _make_credentials() @@ -665,8 +673,8 @@ def test_instance_get_iam_policy_w_requested_policy_version(): def test_instance_set_iam_policy(): from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import Policy - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -745,12 +753,11 @@ def test_instance_cluster_factory(): def test_instance_list_clusters(): + from google.cloud.bigtable.instance import Cluster, Instance from google.cloud.bigtable_admin_v2.types import ( bigtable_instance_admin as messages_v2_pb2, ) from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable.instance import Cluster credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -801,13 +808,13 @@ def test_instance_table_factory(): def _list_tables_helper(table_name=None): - from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2 - from google.cloud.bigtable_admin_v2.types import ( - bigtable_table_admin as table_messages_v1_pb2, - ) from google.cloud.bigtable_admin_v2.services.bigtable_table_admin import ( BaseBigtableTableAdminClient, ) + from google.cloud.bigtable_admin_v2.types import ( + bigtable_table_admin as table_messages_v1_pb2, + ) + from google.cloud.bigtable_admin_v2.types import table as table_data_v2_pb2 
credentials = _make_credentials() client = _make_client(project=PROJECT, credentials=credentials, admin=True) @@ -887,10 +894,10 @@ def test_instance_app_profile_factory(): def test_instance_list_app_profiles(): - from google.api_core.page_iterator import Iterator - from google.api_core.page_iterator import Page - from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 + from google.api_core.page_iterator import Iterator, Page + from google.cloud.bigtable.app_profile import AppProfile + from google.cloud.bigtable_admin_v2.types import instance as data_v2_pb2 class _Iterator(Iterator): def __init__(self, pages): diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py index 77674517e0d8..8ebfebf2cc18 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_policy.py @@ -89,6 +89,7 @@ def test_policy_bigtable_viewers(): def test_policy_from_pb_w_empty(): from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import Policy empty = frozenset() @@ -106,8 +107,8 @@ def test_policy_from_pb_w_empty(): def test_policy_from_pb_w_non_empty(): from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - from google.cloud.bigtable.policy import Policy + + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy ETAG = b"ETAG" VERSION = 1 @@ -130,11 +131,11 @@ def test_policy_from_pb_w_non_empty(): def test_policy_from_pb_w_condition(): - import pytest + from google.api_core.iam import _DICT_ACCESS_MSG, InvalidOperationException from google.iam.v1 import policy_pb2 - from google.api_core.iam import InvalidOperationException, _DICT_ACCESS_MSG - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE - from google.cloud.bigtable.policy import Policy + import pytest + + from google.cloud.bigtable.policy import 
BIGTABLE_ADMIN_ROLE, Policy ETAG = b"ETAG" VERSION = 3 @@ -184,6 +185,7 @@ def test_policy_to_pb_empty(): def test_policy_to_pb_explicit(): from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE VERSION = 1 @@ -204,6 +206,7 @@ def test_policy_to_pb_explicit(): def test_policy_to_pb_w_condition(): from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE VERSION = 3 @@ -252,6 +255,7 @@ def test_policy_from_api_repr_wo_etag(): def test_policy_from_api_repr_w_etag(): import base64 + from google.cloud.bigtable.policy import Policy ETAG = b"ETAG" diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py index f04802f5cc07..b22cd69b3d08 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row.py @@ -174,6 +174,7 @@ def test_direct_row_set_cell_with_non_bytes_value(): def test_direct_row_set_cell_with_non_null_timestamp(): import datetime + from google.cloud._helpers import _EPOCH microseconds = 898294371 @@ -292,7 +293,9 @@ def test_direct_row_delete_cells_no_time_range(): def test_direct_row_delete_cells_with_time_range(): import datetime + from google.cloud._helpers import _EPOCH + from google.cloud.bigtable.row_filters import TimestampRange microseconds = 30871000 # Makes sure already milliseconds granularity @@ -466,6 +469,7 @@ def test_conditional_row_commit(): def test_conditional_row_commit_too_many_mutations(): from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT row_key = b"row_key" @@ -564,6 +568,7 @@ def test_append_row_increment_cell_value(): def test_append_row_commit(): from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT from google.cloud.bigtable_v2.services.bigtable import BigtableClient @@ -630,6 +635,7 @@ def 
test_append_row_commit_no_rules(): def test_append_row_commit_too_many_mutations(): from google.cloud._testing import _Monkey + from google.cloud.bigtable import row as MUT row_key = b"row_key" @@ -644,6 +650,7 @@ def test_append_row_commit_too_many_mutations(): def test__parse_rmw_row_response(): from google.cloud._helpers import _datetime_from_microseconds + from google.cloud.bigtable.row import _parse_rmw_row_response col_fam1 = "col-fam-id" @@ -700,6 +707,7 @@ def test__parse_rmw_row_response(): def test__parse_family_pb(): from google.cloud._helpers import _datetime_from_microseconds + from google.cloud.bigtable.row import _parse_family_pb col_fam1 = "col-fam-id" diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py index 7c2987b56d18..c24f3c0faeb3 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_data.py @@ -34,9 +34,11 @@ def _make_cell(*args, **kwargs): def _cell_from_pb_test_helper(labels=None): import datetime + from google.cloud._helpers import _EPOCH - from google.cloud.bigtable_v2.types import data as data_v2_pb2 + from google.cloud.bigtable.row_data import Cell + from google.cloud.bigtable_v2.types import data as data_v2_pb2 timestamp = _EPOCH + datetime.timedelta(microseconds=TIMESTAMP_MICROS) value = b"value-bytes" @@ -267,8 +269,7 @@ def test_partial_row_data_row_key_getter(): def _make_grpc_call_error(exception): - from grpc import Call - from grpc import RpcError + from grpc import Call, RpcError class TestingException(Call, RpcError): def __init__(self, exception): @@ -288,6 +289,7 @@ def trailing_metadata(self): def test__retry_read_rows_exception_miss(): from google.api_core.exceptions import Conflict + from google.cloud.bigtable.row_data import _retry_read_rows_exception exception = Conflict("testing") @@ -296,6 +298,7 @@ def 
test__retry_read_rows_exception_miss(): def test__retry_read_rows_exception_service_unavailable(): from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row_data import _retry_read_rows_exception exception = ServiceUnavailable("testing") @@ -304,6 +307,7 @@ def test__retry_read_rows_exception_service_unavailable(): def test__retry_read_rows_exception_deadline_exceeded(): from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.row_data import _retry_read_rows_exception exception = DeadlineExceeded("testing") @@ -312,9 +316,10 @@ def test__retry_read_rows_exception_deadline_exceeded(): def test__retry_read_rows_exception_internal_server_not_retriable(): from google.api_core.exceptions import InternalServerError + from google.cloud.bigtable.row_data import ( - _retry_read_rows_exception, RETRYABLE_INTERNAL_ERROR_MESSAGES, + _retry_read_rows_exception, ) err_message = "500 Error" @@ -325,9 +330,10 @@ def test__retry_read_rows_exception_internal_server_not_retriable(): def test__retry_read_rows_exception_internal_server_retriable(): from google.api_core.exceptions import InternalServerError + from google.cloud.bigtable.row_data import ( - _retry_read_rows_exception, RETRYABLE_INTERNAL_ERROR_MESSAGES, + _retry_read_rows_exception, ) for err_message in RETRYABLE_INTERNAL_ERROR_MESSAGES: @@ -337,6 +343,7 @@ def test__retry_read_rows_exception_internal_server_retriable(): def test__retry_read_rows_exception_miss_wrapped_in_grpc(): from google.api_core.exceptions import Conflict + from google.cloud.bigtable.row_data import _retry_read_rows_exception wrapped = Conflict("testing") @@ -346,6 +353,7 @@ def test__retry_read_rows_exception_miss_wrapped_in_grpc(): def test__retry_read_rows_exception_service_unavailable_wrapped_in_grpc(): from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row_data import _retry_read_rows_exception wrapped = ServiceUnavailable("testing") @@ -355,6 +363,7 
@@ def test__retry_read_rows_exception_service_unavailable_wrapped_in_grpc(): def test__retry_read_rows_exception_deadline_exceeded_wrapped_in_grpc(): from google.api_core.exceptions import DeadlineExceeded + from google.cloud.bigtable.row_data import _retry_read_rows_exception wrapped = DeadlineExceeded("testing") @@ -1099,8 +1108,8 @@ def test_RRRM_build_updated_request_last_row_read_raises_invalid_retry_request() def test_RRRM_build_updated_request_row_ranges_read_raises_invalid_retry_request(): - from google.cloud.bigtable.row_data import InvalidRetryRequest from google.cloud.bigtable import row_set + from google.cloud.bigtable.row_data import InvalidRetryRequest row_range1 = row_set.RowRange(b"row_key21", b"row_key29") @@ -1176,6 +1185,7 @@ def _ReadRowsResponseV2(chunks, last_scanned_row_key=b""): def _generate_cell_chunks(chunk_text_pbs): from google.protobuf.text_format import Merge + from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse chunks = [] diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py index b312cb942fdd..447ca95b0ee4 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_filters.py @@ -242,7 +242,9 @@ def test_timestamp_range___ne__same_value(): def _timestamp_range_to_pb_helper(pb_kwargs, start=None, end=None): import datetime + from google.cloud._helpers import _EPOCH + from google.cloud.bigtable.row_filters import TimestampRange if start is not None: @@ -327,8 +329,7 @@ def test_timestamp_range_filter___ne__(): def test_timestamp_range_filter_to_pb(): - from google.cloud.bigtable.row_filters import TimestampRangeFilter - from google.cloud.bigtable.row_filters import TimestampRange + from google.cloud.bigtable.row_filters import TimestampRange, TimestampRangeFilter range_ = TimestampRange() row_filter = 
TimestampRangeFilter(range_) @@ -557,6 +558,7 @@ def test_exact_value_filter_to_pb_w_str(): def test_exact_value_filter_to_pb_w_int(): import struct + from google.cloud.bigtable.row_filters import ExactValueFilter value = 1 @@ -600,9 +602,10 @@ def test_value_range_filter_constructor_explicit(): def test_value_range_filter_constructor_w_int_values(): - from google.cloud.bigtable.row_filters import ValueRangeFilter import struct + from google.cloud.bigtable.row_filters import ValueRangeFilter + start_value = 1 end_value = 10 @@ -897,9 +900,11 @@ def test_filter_combination___ne__(): def test_row_filter_chain_to_pb(): - from google.cloud.bigtable.row_filters import RowFilterChain - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1.to_pb() @@ -917,10 +922,12 @@ def test_row_filter_chain_to_pb(): def test_row_filter_chain_to_pb_nested(): - from google.cloud.bigtable.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.row_filters import RowFilterChain - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + CellsRowLimitFilter, + RowFilterChain, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -941,9 +948,11 @@ def test_row_filter_chain_to_pb_nested(): def test_row_filter_union_to_pb(): - from google.cloud.bigtable.row_filters import RowFilterUnion - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + 
RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1.to_pb() @@ -961,10 +970,12 @@ def test_row_filter_union_to_pb(): def test_row_filter_union_to_pb_nested(): - from google.cloud.bigtable.row_filters import CellsRowLimitFilter - from google.cloud.bigtable.row_filters import RowFilterUnion - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + CellsRowLimitFilter, + RowFilterUnion, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter2 = RowSampleFilter(0.25) @@ -1043,10 +1054,12 @@ def test_conditional_row_filter___ne__(): def test_conditional_row_filter_to_pb(): - from google.cloud.bigtable.row_filters import ConditionalRowFilter - from google.cloud.bigtable.row_filters import CellsRowOffsetFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + CellsRowOffsetFilter, + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1.to_pb() @@ -1073,9 +1086,11 @@ def test_conditional_row_filter_to_pb(): def test_conditional_row_filter_to_pb_true_only(): - from google.cloud.bigtable.row_filters import ConditionalRowFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1.to_pb() @@ -1095,9 +1110,11 @@ def test_conditional_row_filter_to_pb_true_only(): def 
test_conditional_row_filter_to_pb_false_only(): - from google.cloud.bigtable.row_filters import ConditionalRowFilter - from google.cloud.bigtable.row_filters import RowSampleFilter - from google.cloud.bigtable.row_filters import StripValueTransformerFilter + from google.cloud.bigtable.row_filters import ( + ConditionalRowFilter, + RowSampleFilter, + StripValueTransformerFilter, + ) row_filter1 = StripValueTransformerFilter(True) row_filter1_pb = row_filter1.to_pb() diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py index 483c04536666..7d4dccdbf02f 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_merger.py @@ -1,13 +1,13 @@ -import os from itertools import zip_longest +import os from typing import List import proto import pytest -from google.cloud.bigtable.row_data import PartialRowsData, PartialRowData, InvalidChunk -from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse +from google.cloud.bigtable.row_data import InvalidChunk, PartialRowData, PartialRowsData from google.cloud.bigtable.row_merger import _RowMerger +from google.cloud.bigtable_v2.types.bigtable import ReadRowsResponse # TODO: autogenerate protos from diff --git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py index 1a33be7202e4..4142348ee195 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_row_set.py @@ -22,8 +22,7 @@ def test_row_set_constructor(): def test_row_set__eq__(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_key1 = b"row_key1" row_key2 = b"row_key1" @@ -66,8 +65,7 @@ 
def test_row_set__eq__len_row_keys_differ(): def test_row_set__eq__len_row_ranges_differ(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_range1 = RowRange(b"row_key4", b"row_key9") row_range2 = RowRange(b"row_key4", b"row_key9") @@ -99,8 +97,7 @@ def test_row_set__eq__row_keys_differ(): def test_row_set__eq__row_ranges_differ(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_range1 = RowRange(b"row_key4", b"row_key9") row_range2 = RowRange(b"row_key14", b"row_key19") @@ -119,8 +116,7 @@ def test_row_set__eq__row_ranges_differ(): def test_row_set__ne__(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_key1 = b"row_key1" row_key2 = b"row_key1" @@ -139,8 +135,7 @@ def test_row_set__ne__(): def test_row_set__ne__same_value(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_key1 = b"row_key1" row_key2 = b"row_key1" @@ -168,8 +163,7 @@ def test_row_set_add_row_key(): def test_row_set_add_row_range(): - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.row_set import RowRange, RowSet row_set = RowSet() row_range1 = RowRange(b"row_key1", b"row_key9") @@ -203,8 +197,8 @@ def test_row_set_add_row_range_with_prefix(): def test_row_set__update_message_request(): from google.cloud._helpers import _to_bytes - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.row_set import RowSet + + from google.cloud.bigtable.row_set import RowRange, RowSet row_set = RowSet() table_name = "table_name" diff 
--git a/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py b/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py index 6b31a5e23148..a24e78cea5a3 100644 --- a/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py +++ b/packages/google-cloud-bigtable/tests/unit/v2_client/test_table.py @@ -15,11 +15,11 @@ import warnings +from google.api_core.exceptions import DeadlineExceeded +from grpc import StatusCode import mock import pytest -from grpc import StatusCode -from google.api_core.exceptions import DeadlineExceeded from ._testing import _make_credentials PROJECT_ID = "project-id" @@ -53,8 +53,10 @@ @mock.patch("google.cloud.bigtable.table._MAX_BULK_MUTATIONS", new=3) def test__compile_mutation_entries_w_too_many_mutations(): from google.cloud.bigtable.row import DirectRow - from google.cloud.bigtable.table import TooManyMutationsError - from google.cloud.bigtable.table import _compile_mutation_entries + from google.cloud.bigtable.table import ( + TooManyMutationsError, + _compile_mutation_entries, + ) table = mock.Mock(name="table", spec=["name"]) table.name = "table" @@ -74,8 +76,7 @@ def test__compile_mutation_entries_w_too_many_mutations(): def test__compile_mutation_entries_normal(): from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import _compile_mutation_entries - from google.cloud.bigtable_v2.types import MutateRowsRequest - from google.cloud.bigtable_v2.types import data + from google.cloud.bigtable_v2.types import MutateRowsRequest, data table = mock.Mock(spec=["name"]) table.name = "table" @@ -109,9 +110,8 @@ def test__compile_mutation_entries_normal(): def test__check_row_table_name_w_wrong_table_name(): - from google.cloud.bigtable.table import _check_row_table_name - from google.cloud.bigtable.table import TableMismatchError from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable.table import TableMismatchError, _check_row_table_name table = 
mock.Mock(name="table", spec=["name"]) table.name = "table" @@ -353,11 +353,11 @@ def _make_table_api(): def _create_table_helper(split_keys=[], column_families={}): - from google.cloud.bigtable_admin_v2.types import table as table_pb2 + from google.cloud.bigtable.column_family import ColumnFamily from google.cloud.bigtable_admin_v2.types import ( bigtable_table_admin as table_admin_messages_v2_pb2, ) - from google.cloud.bigtable.column_family import ColumnFamily + from google.cloud.bigtable_admin_v2.types import table as table_pb2 credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -402,9 +402,8 @@ def test_table_create_with_split_keys(): def test_table_exists_hit(): - from google.cloud.bigtable_admin_v2.types import ListTablesResponse - from google.cloud.bigtable_admin_v2.types import Table from google.cloud.bigtable import enums + from google.cloud.bigtable_admin_v2.types import ListTablesResponse, Table credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -426,6 +425,7 @@ def test_table_exists_hit(): def test_table_exists_miss(): from google.api_core.exceptions import NotFound + from google.cloud.bigtable import enums credentials = _make_credentials() @@ -447,6 +447,7 @@ def test_table_exists_miss(): def test_table_exists_error(): from google.api_core.exceptions import BadRequest + from google.cloud.bigtable import enums credentials = _make_credentials() @@ -557,6 +558,7 @@ def test_table_get_cluster_states(): def test_table_get_encryption_info(): from google.rpc.code_pb2 import Code + from google.cloud.bigtable.encryption_info import EncryptionInfo from google.cloud.bigtable.enums import EncryptionInfo as enum_crypto from google.cloud.bigtable.enums import Table as enum_table @@ -640,10 +642,11 @@ def _make_data_api(): def _table_read_row_helper(chunks, expected_result, app_profile_id=None): from google.cloud._testing import 
_Monkey + from google.cloud.bigtable import table as MUT - from google.cloud.bigtable.row_set import RowSet - from google.cloud.bigtable.row_filters import RowSampleFilter from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.row_set import RowSet credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -707,8 +710,7 @@ def test_table_read_row_miss_no_chunks_in_response(): def test_table_read_row_complete(): - from google.cloud.bigtable.row_data import Cell - from google.cloud.bigtable.row_data import PartialRowData + from google.cloud.bigtable.row_data import Cell, PartialRowData app_profile_id = "app-profile-id" chunk = _ReadRowsResponseCellChunkPB( @@ -771,6 +773,7 @@ def _table_mutate_rows_helper( mutation_timeout=None, app_profile_id=None, retry=None, timeout=None ): from google.rpc.status_pb2 import Status + from google.cloud.bigtable.table import DEFAULT_RETRY credentials = _make_credentials() @@ -857,9 +860,9 @@ def test_table_mutate_rows_w_mutation_timeout_and_timeout_arg(): def test_table_read_rows(): from google.cloud._testing import _Monkey - from google.cloud.bigtable.row_data import PartialRowsData + from google.cloud.bigtable import table as MUT - from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS, PartialRowsData credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -1082,10 +1085,9 @@ def test_table_yield_retry_rows(): def test_table_yield_rows_with_row_set(): - from google.cloud.bigtable.row_set import RowSet - from google.cloud.bigtable.row_set import RowRange - from google.cloud.bigtable.table import _create_row_request from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS + from google.cloud.bigtable.row_set import 
RowRange, RowSet + from google.cloud.bigtable.table import _create_row_request credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -1261,6 +1263,7 @@ def test_table_mutations_batcher_factory(): def test_table_get_iam_policy(): from google.iam.v1 import policy_pb2 + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE credentials = _make_credentials() @@ -1292,8 +1295,8 @@ def test_table_get_iam_policy(): def test_table_set_iam_policy(): from google.iam.v1 import policy_pb2 - from google.cloud.bigtable.policy import Policy - from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE + + from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE, Policy credentials = _make_credentials() client = _make_client(project="project-id", credentials=credentials, admin=True) @@ -1378,6 +1381,7 @@ def test_table_backup_factory_defaults(): def test_table_backup_factory_non_defaults(): import datetime + from google.cloud.bigtable.backup import Backup from google.cloud.bigtable.instance import Instance @@ -1405,11 +1409,9 @@ def test_table_backup_factory_non_defaults(): def _table_list_backups_helper(cluster_id=None, filter_=None, **kwargs): - from google.cloud.bigtable_admin_v2.types import ( - Backup as backup_pb, - bigtable_table_admin, - ) from google.cloud.bigtable.backup import Backup + from google.cloud.bigtable_admin_v2.types import Backup as backup_pb + from google.cloud.bigtable_admin_v2.types import bigtable_table_admin client = _make_client( project=PROJECT_ID, credentials=_make_credentials(), admin=True @@ -1520,9 +1522,10 @@ def _make_responses_statuses(codes): def _make_responses(codes): - from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse from google.rpc.status_pb2 import Status + from google.cloud.bigtable_v2.types.bigtable import MutateRowsResponse + entries = [ MutateRowsResponse.Entry(index=i, status=Status(code=codes[i])) for i in range(len(codes)) @@ -1645,6 
+1648,7 @@ def _do_mutate_retryable_rows_helper( mutate_rows_side_effect=None, ): from google.api_core.exceptions import ServiceUnavailable + from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import _BigtableRetryableError from google.cloud.bigtable_v2.types import bigtable as data_messages_v2_pb2 @@ -1801,6 +1805,7 @@ def test_rmrw_do_mutate_retryable_rows_w_retryable_error_internal_rst_stream_err # Raise internal server error with RST STREAM error messages # There should be no error raised and that the request is retried from google.api_core.exceptions import InternalServerError + from google.cloud.bigtable.row_data import RETRYABLE_INTERNAL_ERROR_MESSAGES row_cells = [ @@ -2079,8 +2084,8 @@ def test__create_row_request_row_range_both_keys_inclusive(): def test__create_row_request_with_filter(): - from google.cloud.bigtable.table import _create_row_request from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable.table import _create_row_request table_name = "table_name" row_filter = RowSampleFilter(0.33) @@ -2102,8 +2107,8 @@ def test__create_row_request_with_limit(): def test__create_row_request_with_row_set(): - from google.cloud.bigtable.table import _create_row_request from google.cloud.bigtable.row_set import RowSet + from google.cloud.bigtable.table import _create_row_request table_name = "table_name" row_set = RowSet() diff --git a/packages/google-cloud-billing-budgets/.repo-metadata.json b/packages/google-cloud-billing-budgets/.repo-metadata.json index e6ee3c2281e9..98fff06697a8 100644 --- a/packages/google-cloud-billing-budgets/.repo-metadata.json +++ b/packages/google-cloud-billing-budgets/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan.", - "api_id": "billingbudgets.googleapis.com", - "api_shortname": "billingbudgets", - 
"client_documentation": "https://cloud.google.com/python/docs/reference/billingbudgets/latest", - "default_version": "v1", - "distribution_name": "google-cloud-billing-budgets", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559770", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "billingbudgets", - "name_pretty": "Cloud Billing Budget", - "product_documentation": "https://cloud.google.com/billing/docs/how-to/budget-api-overview", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "The Cloud Billing Budget API stores Cloud Billing budgets, which define a budget plan and the rules to execute as spend is tracked against that plan.", + "api_id": "billingbudgets.googleapis.com", + "api_shortname": "billingbudgets", + "client_documentation": "https://cloud.google.com/python/docs/reference/billingbudgets/latest", + "default_version": "v1", + "distribution_name": "google-cloud-billing-budgets", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559770", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "billingbudgets", + "name_pretty": "Cloud Billing Budget", + "product_documentation": "https://cloud.google.com/billing/docs/how-to/budget-api-overview", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-billing/.repo-metadata.json b/packages/google-cloud-billing/.repo-metadata.json index 2d8a46f362c3..471b1912a7b0 100644 --- a/packages/google-cloud-billing/.repo-metadata.json +++ b/packages/google-cloud-billing/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows developers to manage their billing accounts or browse the catalog of SKUs and pricing.", - "api_id": "cloudbilling.googleapis.com", - "api_shortname": "cloudbilling", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudbilling/latest", - 
"default_version": "v1", - "distribution_name": "google-cloud-billing", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudbilling", - "name_pretty": "Cloud Billing", - "product_documentation": "https://cloud.google.com/billing", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows developers to manage their billing accounts or browse the catalog of SKUs and pricing.", + "api_id": "cloudbilling.googleapis.com", + "api_shortname": "cloudbilling", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudbilling/latest", + "default_version": "v1", + "distribution_name": "google-cloud-billing", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudbilling", + "name_pretty": "Cloud Billing", + "product_documentation": "https://cloud.google.com/billing", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-binary-authorization/.repo-metadata.json b/packages/google-cloud-binary-authorization/.repo-metadata.json index 190a4275ed5a..7f6deef1d50d 100644 --- a/packages/google-cloud-binary-authorization/.repo-metadata.json +++ b/packages/google-cloud-binary-authorization/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": " is a service on Google Cloud that provides centralized software supply-chain security for applications that run on Google Kubernetes Engine (GKE) and Anthos clusters on VMware", - "api_id": "binaryauthorization.googleapis.com", - "api_shortname": "binaryauthorization", - "client_documentation": "https://cloud.google.com/python/docs/reference/binaryauthorization/latest", - "default_version": "v1", - "distribution_name": "google-cloud-binary-authorization", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "binaryauthorization", - "name_pretty": "Binary Authorization", - "product_documentation": 
"https://cloud.google.com/binary-authorization", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": " is a service on Google Cloud that provides centralized software supply-chain security for applications that run on Google Kubernetes Engine (GKE) and Anthos clusters on VMware", + "api_id": "binaryauthorization.googleapis.com", + "api_shortname": "binaryauthorization", + "client_documentation": "https://cloud.google.com/python/docs/reference/binaryauthorization/latest", + "default_version": "v1", + "distribution_name": "google-cloud-binary-authorization", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "binaryauthorization", + "name_pretty": "Binary Authorization", + "product_documentation": "https://cloud.google.com/binary-authorization", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-build/.repo-metadata.json b/packages/google-cloud-build/.repo-metadata.json index 59423cabf73f..72ce65d789df 100644 --- a/packages/google-cloud-build/.repo-metadata.json +++ b/packages/google-cloud-build/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "lets you build software quickly across all languages. 
Get complete control over defining custom workflows for building, testing, and deploying across multiple environments such as VMs, serverless, Kubernetes, or Firebase.", - "api_id": "cloudbuild.googleapis.com", - "api_shortname": "cloudbuild", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudbuild/latest", - "default_version": "v1", - "distribution_name": "google-cloud-build", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5226584", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudbuild", - "name_pretty": "Cloud Build", - "product_documentation": "https://cloud.google.com/cloud-build/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "lets you build software quickly across all languages. Get complete control over defining custom workflows for building, testing, and deploying across multiple environments such as VMs, serverless, Kubernetes, or Firebase.", + "api_id": "cloudbuild.googleapis.com", + "api_shortname": "cloudbuild", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudbuild/latest", + "default_version": "v1", + "distribution_name": "google-cloud-build", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5226584", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudbuild", + "name_pretty": "Cloud Build", + "product_documentation": "https://cloud.google.com/cloud-build/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-capacityplanner/.repo-metadata.json b/packages/google-cloud-capacityplanner/.repo-metadata.json index b94a0a6dd9d8..807647637a87 100644 --- a/packages/google-cloud-capacityplanner/.repo-metadata.json +++ b/packages/google-cloud-capacityplanner/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Provides programmatic access to 
Capacity Planner features.", - "api_id": "capacityplanner.googleapis.com", - "api_shortname": "capacityplanner", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-capacityplanner/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-capacityplanner", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1194830", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-capacityplanner", - "name_pretty": "Capacity Planner API", - "product_documentation": "https://cloud.google.com/capacity-planner/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "Provides programmatic access to Capacity Planner features.", + "api_id": "capacityplanner.googleapis.com", + "api_shortname": "capacityplanner", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-capacityplanner/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-capacityplanner", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1194830", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-capacityplanner", + "name_pretty": "Capacity Planner API", + "product_documentation": "https://cloud.google.com/capacity-planner/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-certificate-manager/.repo-metadata.json b/packages/google-cloud-certificate-manager/.repo-metadata.json index b912727d2253..766fe6046d9d 100644 --- a/packages/google-cloud-certificate-manager/.repo-metadata.json +++ b/packages/google-cloud-certificate-manager/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "lets you acquire and manage TLS (SSL) certificates for use with Cloud Load Balancing.", - "api_id": "certificatemanager.googleapis.com", - "api_shortname": "certificatemanager", - 
"client_documentation": "https://cloud.google.com/python/docs/reference/certificatemanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-certificate-manager", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "certificatemanager", - "name_pretty": "Certificate Manager", - "product_documentation": "https://cloud.google.com/python/docs/reference/certificatemanager/latest", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "lets you acquire and manage TLS (SSL) certificates for use with Cloud Load Balancing.", + "api_id": "certificatemanager.googleapis.com", + "api_shortname": "certificatemanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/certificatemanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-certificate-manager", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "certificatemanager", + "name_pretty": "Certificate Manager", + "product_documentation": "https://cloud.google.com/python/docs/reference/certificatemanager/latest", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-ces/.repo-metadata.json b/packages/google-cloud-ces/.repo-metadata.json index 794ae01f8104..edef9673d093 100644 --- a/packages/google-cloud-ces/.repo-metadata.json +++ b/packages/google-cloud-ces/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "ces.googleapis.com", - "api_shortname": "ces", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-ces/latest", - "default_version": "v1", - "distribution_name": "google-cloud-ces", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1157150", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-ces", - "name_pretty": "Gemini Enterprise for Customer Experience API", - 
"product_documentation": "https://docs.cloud.google.com/customer-engagement-ai/conversational-agents/ps", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_id": "ces.googleapis.com", + "api_shortname": "ces", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-ces/latest", + "default_version": "v1", + "distribution_name": "google-cloud-ces", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1157150", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-ces", + "name_pretty": "Gemini Enterprise for Customer Experience API", + "product_documentation": "https://docs.cloud.google.com/customer-engagement-ai/conversational-agents/ps", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-channel/.repo-metadata.json b/packages/google-cloud-channel/.repo-metadata.json index e15da6838c4d..57a62a18d145 100644 --- a/packages/google-cloud-channel/.repo-metadata.json +++ b/packages/google-cloud-channel/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "With Channel Services, Google Cloud partners and resellers have a single unified resale platform, with a unified resale catalog, customer management, order management, billing management, policy and authorization management, and cost management.", - "api_id": "cloudchannel.googleapis.com", - "api_shortname": "cloudchannel", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudchannel/latest", - "default_version": "v1", - "distribution_name": "google-cloud-channel", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudchannel", - "name_pretty": "Channel Services", - "product_documentation": "https://cloud.google.com/channel/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "With Channel Services, Google Cloud 
partners and resellers have a single unified resale platform, with a unified resale catalog, customer management, order management, billing management, policy and authorization management, and cost management.", + "api_id": "cloudchannel.googleapis.com", + "api_shortname": "cloudchannel", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudchannel/latest", + "default_version": "v1", + "distribution_name": "google-cloud-channel", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudchannel", + "name_pretty": "Channel Services", + "product_documentation": "https://cloud.google.com/channel/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-chronicle/.repo-metadata.json b/packages/google-cloud-chronicle/.repo-metadata.json index 7444ffb3cad8..a41243c45e36 100644 --- a/packages/google-cloud-chronicle/.repo-metadata.json +++ b/packages/google-cloud-chronicle/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Google Cloud Security Operations API, popularly known as the Chronicle API, serves endpoints that enable security analysts to analyze and mitigate a security threat throughout its lifecycle", - "api_id": "chronicle.googleapis.com", - "api_shortname": "chronicle", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-chronicle/latest", - "default_version": "v1", - "distribution_name": "google-cloud-chronicle", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1387895", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-chronicle", - "name_pretty": "Chronicle API", - "product_documentation": "https://cloud.google.com/chronicle/docs/secops/secops-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google Cloud Security Operations API, popularly known as the Chronicle API, serves 
endpoints that enable security analysts to analyze and mitigate a security threat throughout its lifecycle", + "api_id": "chronicle.googleapis.com", + "api_shortname": "chronicle", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-chronicle/latest", + "default_version": "v1", + "distribution_name": "google-cloud-chronicle", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1387895", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-chronicle", + "name_pretty": "Chronicle API", + "product_documentation": "https://cloud.google.com/chronicle/docs/secops/secops-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-cloudcontrolspartner/.repo-metadata.json b/packages/google-cloud-cloudcontrolspartner/.repo-metadata.json index a71a8e25d810..4f4fdbe7a35b 100644 --- a/packages/google-cloud-cloudcontrolspartner/.repo-metadata.json +++ b/packages/google-cloud-cloudcontrolspartner/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Provides insights about your customers and their Assured Workloads based on your Sovereign Controls by Partners offering.", - "api_id": "cloudcontrolspartner.googleapis.com", - "api_shortname": "cloudcontrolspartner", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudcontrolspartner/latest", - "default_version": "v1", - "distribution_name": "google-cloud-cloudcontrolspartner", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-cloudcontrolspartner", - "name_pretty": "Cloud Controls Partner API", - "product_documentation": "https://cloud.google.com/sovereign-controls-by-partners/docs/sovereign-partners/reference/rest", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": 
"Provides insights about your customers and their Assured Workloads based on your Sovereign Controls by Partners offering.", + "api_id": "cloudcontrolspartner.googleapis.com", + "api_shortname": "cloudcontrolspartner", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudcontrolspartner/latest", + "default_version": "v1", + "distribution_name": "google-cloud-cloudcontrolspartner", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-cloudcontrolspartner", + "name_pretty": "Cloud Controls Partner API", + "product_documentation": "https://cloud.google.com/sovereign-controls-by-partners/docs/sovereign-partners/reference/rest", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-cloudsecuritycompliance/.repo-metadata.json b/packages/google-cloud-cloudsecuritycompliance/.repo-metadata.json index 9c2d042ea193..754a36b3f955 100644 --- a/packages/google-cloud-cloudsecuritycompliance/.repo-metadata.json +++ b/packages/google-cloud-cloudsecuritycompliance/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "null ", - "api_id": "cloudsecuritycompliance.googleapis.com", - "api_shortname": "cloudsecuritycompliance", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudsecuritycompliance/latest", - "default_version": "v1", - "distribution_name": "google-cloud-cloudsecuritycompliance", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1761967&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-cloudsecuritycompliance", - "name_pretty": "Cloud Security Compliance API", - "product_documentation": "https://cloud.google.com/security-command-center/docs/compliance-manager-overview", - "release_level": "preview", - "repo": 
"googleapis/google-cloud-python" + "api_description": "null ", + "api_id": "cloudsecuritycompliance.googleapis.com", + "api_shortname": "cloudsecuritycompliance", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudsecuritycompliance/latest", + "default_version": "v1", + "distribution_name": "google-cloud-cloudsecuritycompliance", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1761967\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-cloudsecuritycompliance", + "name_pretty": "Cloud Security Compliance API", + "product_documentation": "https://cloud.google.com/security-command-center/docs/compliance-manager-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-commerce-consumer-procurement/.repo-metadata.json b/packages/google-cloud-commerce-consumer-procurement/.repo-metadata.json index 63490ebf5e3e..fa0f3ffaada8 100644 --- a/packages/google-cloud-commerce-consumer-procurement/.repo-metadata.json +++ b/packages/google-cloud-commerce-consumer-procurement/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Cloud Commerce Consumer Procurement API", - "api_id": "cloudcommerceconsumerprocurement.googleapis.com", - "api_shortname": "procurement", - "client_documentation": "https://cloud.google.com/python/docs/reference/procurement/latest", - "default_version": "v1", - "distribution_name": "google-cloud-commerce-consumer-procurement", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1396141", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "procurement", - "name_pretty": "Cloud Commerce Consumer Procurement API", - "product_documentation": "https://cloud.google.com/marketplace/docs/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud Commerce Consumer Procurement 
API", + "api_id": "cloudcommerceconsumerprocurement.googleapis.com", + "api_shortname": "procurement", + "client_documentation": "https://cloud.google.com/python/docs/reference/procurement/latest", + "default_version": "v1", + "distribution_name": "google-cloud-commerce-consumer-procurement", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1396141", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "procurement", + "name_pretty": "Cloud Commerce Consumer Procurement API", + "product_documentation": "https://cloud.google.com/marketplace/docs/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-common/.repo-metadata.json b/packages/google-cloud-common/.repo-metadata.json index 4a844f5cf60e..5a401d978d05 100644 --- a/packages/google-cloud-common/.repo-metadata.json +++ b/packages/google-cloud-common/.repo-metadata.json @@ -1,14 +1,15 @@ { - "api_description": "This package contains generated Python types for google.cloud.common", - "client_documentation": "https://cloud.google.com/python/docs/reference/common/latest", - "default_version": "apiVersion", - "distribution_name": "google-cloud-common", - "issue_tracker": "", - "language": "python", - "library_type": "CORE", - "name": "common", - "name_pretty": "Google Cloud Common", - "product_documentation": "https://cloud.google.com", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "This package contains generated Python types for google.cloud.common", + "api_id": "common.googleapis.com", + "api_shortname": "common", + "client_documentation": "https://cloud.google.com/python/docs/reference/common/latest", + "default_version": "apiVersion", + "distribution_name": "google-cloud-common", + "language": "python", + "library_type": "CORE", + "name": "common", + "name_pretty": "Google Cloud Common", + "product_documentation": "https://cloud.google.com", + 
"release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-compute-v1beta/.repo-metadata.json b/packages/google-cloud-compute-v1beta/.repo-metadata.json index 7961c4fa5705..8eed40378961 100644 --- a/packages/google-cloud-compute-v1beta/.repo-metadata.json +++ b/packages/google-cloud-compute-v1beta/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations.", - "api_id": "compute.googleapis.com", - "api_shortname": "compute", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-compute-v1beta/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-compute-v1beta", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187134&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-compute-v1beta", - "name_pretty": "Compute Engine", - "product_documentation": "https://cloud.google.com/compute/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. 
Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations.", + "api_id": "compute.googleapis.com", + "api_shortname": "compute", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-compute-v1beta/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-compute-v1beta", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187134\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-compute-v1beta", + "name_pretty": "Compute Engine", + "product_documentation": "https://cloud.google.com/compute/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-compute/.repo-metadata.json b/packages/google-cloud-compute/.repo-metadata.json index f889258d9218..7ee2b6fab533 100644 --- a/packages/google-cloud-compute/.repo-metadata.json +++ b/packages/google-cloud-compute/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. 
Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations.", - "api_id": "compute.googleapis.com", - "api_shortname": "compute", - "client_documentation": "https://cloud.google.com/python/docs/reference/compute/latest", - "default_version": "v1", - "distribution_name": "google-cloud-compute", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187134&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "compute", - "name_pretty": "Compute Engine", - "product_documentation": "https://cloud.google.com/compute/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations.", + "api_id": "compute.googleapis.com", + "api_shortname": "compute", + "client_documentation": "https://cloud.google.com/python/docs/reference/compute/latest", + "default_version": "v1", + "distribution_name": "google-cloud-compute", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187134\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "compute", + "name_pretty": "Compute Engine", + "product_documentation": "https://cloud.google.com/compute/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-confidentialcomputing/.repo-metadata.json b/packages/google-cloud-confidentialcomputing/.repo-metadata.json index a444cbb9fa6a..cea714ba879e 100644 --- a/packages/google-cloud-confidentialcomputing/.repo-metadata.json +++ 
b/packages/google-cloud-confidentialcomputing/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Protect data in-use with Confidential VMs, Confidential GKE, Confidential Dataproc, and Confidential Space.", - "api_id": "confidentialcomputing.googleapis.com", - "api_shortname": "confidentialcomputing", - "client_documentation": "https://cloud.google.com/python/docs/reference/confidentialcomputing/latest", - "default_version": "v1", - "distribution_name": "google-cloud-confidentialcomputing", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1166820", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "confidentialcomputing", - "name_pretty": "Confidential Computing API", - "product_documentation": "https://cloud.google.com/confidential-computing", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Protect data in-use with Confidential VMs, Confidential GKE, Confidential Dataproc, and Confidential Space.", + "api_id": "confidentialcomputing.googleapis.com", + "api_shortname": "confidentialcomputing", + "client_documentation": "https://cloud.google.com/python/docs/reference/confidentialcomputing/latest", + "default_version": "v1", + "distribution_name": "google-cloud-confidentialcomputing", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1166820", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "confidentialcomputing", + "name_pretty": "Confidential Computing API", + "product_documentation": "https://cloud.google.com/confidential-computing", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-config/.repo-metadata.json b/packages/google-cloud-config/.repo-metadata.json index 15118132fe79..5210406c4cda 100644 --- a/packages/google-cloud-config/.repo-metadata.json +++ b/packages/google-cloud-config/.repo-metadata.json @@ -1,16 +1,16 @@ { - 
"api_description": "Infrastructure Manager API", - "api_id": "config.googleapis.com", - "api_shortname": "config", - "client_documentation": "https://cloud.google.com/python/docs/reference/config/latest", - "default_version": "v1", - "distribution_name": "google-cloud-config", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=536700", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "config", - "name_pretty": "Infrastructure Manager API", - "product_documentation": "https://cloud.google.com/infrastructure-manager/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Infrastructure Manager API", + "api_id": "config.googleapis.com", + "api_shortname": "config", + "client_documentation": "https://cloud.google.com/python/docs/reference/config/latest", + "default_version": "v1", + "distribution_name": "google-cloud-config", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=536700", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "config", + "name_pretty": "Infrastructure Manager API", + "product_documentation": "https://cloud.google.com/infrastructure-manager/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-configdelivery/.repo-metadata.json b/packages/google-cloud-configdelivery/.repo-metadata.json index 9d72ffc152a6..58ab89365633 100644 --- a/packages/google-cloud-configdelivery/.repo-metadata.json +++ b/packages/google-cloud-configdelivery/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "ConfigDelivery service manages the deployment of kubernetes configuration to a fleet of kubernetes clusters.", - "api_id": "configdelivery.googleapis.com", - "api_shortname": "configdelivery", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-configdelivery/latest", - "default_version": 
"v1alpha", - "distribution_name": "google-cloud-configdelivery", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-configdelivery", - "name_pretty": "Config Delivery API", - "product_documentation": "https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/reference/rest", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "ConfigDelivery service manages the deployment of kubernetes configuration to a fleet of kubernetes clusters.", + "api_id": "configdelivery.googleapis.com", + "api_shortname": "configdelivery", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-configdelivery/latest", + "default_version": "v1alpha", + "distribution_name": "google-cloud-configdelivery", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-configdelivery", + "name_pretty": "Config Delivery API", + "product_documentation": "https://cloud.google.com/kubernetes-engine/enterprise/config-sync/docs/reference/rest", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-contact-center-insights/.repo-metadata.json b/packages/google-cloud-contact-center-insights/.repo-metadata.json index 8ece15ff7237..a2fe75ff6ad5 100644 --- a/packages/google-cloud-contact-center-insights/.repo-metadata.json +++ b/packages/google-cloud-contact-center-insights/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": " helps users detect and visualize patterns in their contact center data.", - "api_id": "contactcenterinsights.googleapis.com", - "api_shortname": "contactcenterinsights", - "client_documentation": "https://cloud.google.com/python/docs/reference/contactcenterinsights/latest", - "default_version": "v1", - 
"distribution_name": "google-cloud-contact-center-insights", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "contactcenterinsights", - "name_pretty": "Contact Center AI Insights", - "product_documentation": "https://cloud.google.com/contact-center/insights/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": " helps users detect and visualize patterns in their contact center data.", + "api_id": "contactcenterinsights.googleapis.com", + "api_shortname": "contactcenterinsights", + "client_documentation": "https://cloud.google.com/python/docs/reference/contactcenterinsights/latest", + "default_version": "v1", + "distribution_name": "google-cloud-contact-center-insights", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "contactcenterinsights", + "name_pretty": "Contact Center AI Insights", + "product_documentation": "https://cloud.google.com/contact-center/insights/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-container/.repo-metadata.json b/packages/google-cloud-container/.repo-metadata.json index bd5f586cb464..53051794e678 100644 --- a/packages/google-cloud-container/.repo-metadata.json +++ b/packages/google-cloud-container/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "The Google Kubernetes Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology.", - "api_id": "container.googleapis.com", - "api_shortname": "container", - "client_documentation": "https://cloud.google.com/python/docs/reference/container/latest", - "default_version": "v1", - "distribution_name": "google-cloud-container", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559746", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "container", - "name_pretty": "Kubernetes Engine", - 
"product_documentation": "https://cloud.google.com/kubernetes-engine/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "The Google Kubernetes Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology.", + "api_id": "container.googleapis.com", + "api_shortname": "container", + "client_documentation": "https://cloud.google.com/python/docs/reference/container/latest", + "default_version": "v1", + "distribution_name": "google-cloud-container", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559746", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "container", + "name_pretty": "Kubernetes Engine", + "product_documentation": "https://cloud.google.com/kubernetes-engine/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-containeranalysis/.repo-metadata.json b/packages/google-cloud-containeranalysis/.repo-metadata.json index 2923f71e4c1c..edf57da46000 100644 --- a/packages/google-cloud-containeranalysis/.repo-metadata.json +++ b/packages/google-cloud-containeranalysis/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is a service that provides vulnerability scanning and metadata storage for software artifacts. The service performs vulnerability scans on built software artifacts, such as the images in Container Registry, then stores the resulting metadata and makes it available for consumption through an API. 
The metadata may come from several sources, including vulnerability scanning, other Cloud services, and third-party providers.", - "api_id": "containeranalysis.googleapis.com", - "api_shortname": "containeranalysis", - "client_documentation": "https://cloud.google.com/python/docs/reference/containeranalysis/latest", - "default_version": "v1", - "distribution_name": "google-cloud-containeranalysis", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559777", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "containeranalysis", - "name_pretty": "Container Analysis", - "product_documentation": "https://cloud.google.com/container-registry/docs/container-analysis", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "is a service that provides vulnerability scanning and metadata storage for software artifacts. The service performs vulnerability scans on built software artifacts, such as the images in Container Registry, then stores the resulting metadata and makes it available for consumption through an API. 
The metadata may come from several sources, including vulnerability scanning, other Cloud services, and third-party providers.", + "api_id": "containeranalysis.googleapis.com", + "api_shortname": "containeranalysis", + "client_documentation": "https://cloud.google.com/python/docs/reference/containeranalysis/latest", + "default_version": "v1", + "distribution_name": "google-cloud-containeranalysis", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559777", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "containeranalysis", + "name_pretty": "Container Analysis", + "product_documentation": "https://cloud.google.com/container-registry/docs/container-analysis", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-contentwarehouse/.repo-metadata.json b/packages/google-cloud-contentwarehouse/.repo-metadata.json index 3a5dae48bb53..555d50ab5893 100644 --- a/packages/google-cloud-contentwarehouse/.repo-metadata.json +++ b/packages/google-cloud-contentwarehouse/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "contentwarehouse.googleapis.com", - "api_shortname": "contentwarehouse", - "client_documentation": "https://cloud.google.com/python/docs/reference/contentwarehouse/latest", - "default_version": "v1", - "distribution_name": "google-cloud-contentwarehouse", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "contentwarehouse", - "name_pretty": "Document AI Warehouse", - "product_documentation": "https://cloud.google.com/document-warehouse/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "contentwarehouse.googleapis.com", + "api_shortname": "contentwarehouse", + "client_documentation": "https://cloud.google.com/python/docs/reference/contentwarehouse/latest", + "default_version": "v1", + 
"distribution_name": "google-cloud-contentwarehouse", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "contentwarehouse", + "name_pretty": "Document AI Warehouse", + "product_documentation": "https://cloud.google.com/document-warehouse/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-core/.repo-metadata.json b/packages/google-cloud-core/.repo-metadata.json index 187131802b31..3a1c9dda3e06 100644 --- a/packages/google-cloud-core/.repo-metadata.json +++ b/packages/google-cloud-core/.repo-metadata.json @@ -1,13 +1,11 @@ { - "name": "google-cloud-core", - "name_pretty": "Google API client core library", "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-core/latest", + "distribution_name": "google-cloud-core", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "stable", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-core", - "default_version": "", - "codeowner_team": "" -} + "name": "google-cloud-core", + "name_pretty": "Google API client core library", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-core/docs/README.rst b/packages/google-cloud-core/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-core/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-core/docs/README.rst b/packages/google-cloud-core/docs/README.rst new file mode 100644 index 000000000000..bfd89a74e64c --- /dev/null +++ b/packages/google-cloud-core/docs/README.rst @@ -0,0 +1,40 @@ +Core Helpers for Google Cloud Python Client Library 
+=================================================== + +|pypi| |versions| + +This library is not meant to stand-alone. Instead it defines +common helpers (e.g. base ``Client`` classes) used by all of the +``google-cloud-*`` packages. + + +- `Documentation`_ + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg + :target: https://pypi.org/project/google-cloud-core/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg + :target: https://pypi.org/project/google-cloud-core/ +.. _Documentation: https://cloud.google.com/python/docs/reference/google-cloud-core/latest + +Quick Start +----------- + +.. code-block:: console + + $ pip install --upgrade google-cloud-core + +For more information on setting up your Python development environment, +such as installing ``pip`` and ``virtualenv`` on your system, please refer +to `Python Development Environment Setup Guide`_ for Google Cloud Platform. + +.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup + + +Supported Python Versions +------------------------- +Python >= 3.9 + +Unsupported Python Versions +--------------------------- + +Python <= 3.8 diff --git a/packages/google-cloud-data-fusion/.repo-metadata.json b/packages/google-cloud-data-fusion/.repo-metadata.json index c66758bd9af0..2c2db71d4d0c 100644 --- a/packages/google-cloud-data-fusion/.repo-metadata.json +++ b/packages/google-cloud-data-fusion/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a fully managed, cloud-native, enterprise data integration service for quickly building and managing data pipelines.", - "api_id": "datafusion.googleapis.com", - "api_shortname": "datafusion", - "client_documentation": "https://cloud.google.com/python/docs/reference/datafusion/latest", - "default_version": "v1", - "distribution_name": "google-cloud-data-fusion", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "datafusion", - "name_pretty": 
"Cloud Data Fusion", - "product_documentation": "https://cloud.google.com/data-fusion", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a fully managed, cloud-native, enterprise data integration service for quickly building and managing data pipelines.", + "api_id": "datafusion.googleapis.com", + "api_shortname": "datafusion", + "client_documentation": "https://cloud.google.com/python/docs/reference/datafusion/latest", + "default_version": "v1", + "distribution_name": "google-cloud-data-fusion", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "datafusion", + "name_pretty": "Cloud Data Fusion", + "product_documentation": "https://cloud.google.com/data-fusion", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-data-qna/.repo-metadata.json b/packages/google-cloud-data-qna/.repo-metadata.json index 66876382ba79..6522942ef969 100644 --- a/packages/google-cloud-data-qna/.repo-metadata.json +++ b/packages/google-cloud-data-qna/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Data QnA is a natural language question and answer service for BigQuery data.", - "api_id": "dataqna.googleapis.com", - "api_shortname": "dataqna", - "client_documentation": "https://cloud.google.com/python/docs/reference/dataqna/latest", - "default_version": "v1alpha", - "distribution_name": "google-cloud-data-qna", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dataqna", - "name_pretty": "Data QnA", - "product_documentation": "https://cloud.google.com/bigquery/docs/dataqna", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Data QnA is a natural language question and answer service for BigQuery data.", + "api_id": "dataqna.googleapis.com", + "api_shortname": "dataqna", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/dataqna/latest", + "default_version": "v1alpha", + "distribution_name": "google-cloud-data-qna", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dataqna", + "name_pretty": "Data QnA", + "product_documentation": "https://cloud.google.com/bigquery/docs/dataqna", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-databasecenter/.repo-metadata.json b/packages/google-cloud-databasecenter/.repo-metadata.json index 0a63fe9226bc..e1055b2ba53f 100644 --- a/packages/google-cloud-databasecenter/.repo-metadata.json +++ b/packages/google-cloud-databasecenter/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Database Center offers a comprehensive, organization-wide platform for\nmonitoring database fleet health across various products. It simplifies\nmanagement and reduces risk by automatically aggregating and summarizing\nkey health signals, removing the need for custom dashboards. 
The platform\nprovides a unified view through its dashboard and API, enabling teams\nfocused on reliability, compliance, security, cost, and administration to\nquickly identify and address relevant issues within their database fleets.", - "api_id": "databasecenter.googleapis.com", - "api_shortname": "databasecenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-databasecenter/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-databasecenter", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1476470", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-databasecenter", - "name_pretty": "Database Center API", - "product_documentation": "https://cloud.google.com/database-center/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "Database Center offers a comprehensive, organization-wide platform for\nmonitoring database fleet health across various products. It simplifies\nmanagement and reduces risk by automatically aggregating and summarizing\nkey health signals, removing the need for custom dashboards. 
The platform\nprovides a unified view through its dashboard and API, enabling teams\nfocused on reliability, compliance, security, cost, and administration to\nquickly identify and address relevant issues within their database fleets.", + "api_id": "databasecenter.googleapis.com", + "api_shortname": "databasecenter", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-databasecenter/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-databasecenter", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1476470", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-databasecenter", + "name_pretty": "Database Center API", + "product_documentation": "https://cloud.google.com/database-center/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/.repo-metadata.json b/packages/google-cloud-datacatalog-lineage-configmanagement/.repo-metadata.json index be1b59a4a834..473750175982 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/.repo-metadata.json +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "datalineage.googleapis.com", - "api_shortname": "datalineage", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-datacatalog-lineage-configmanagement/latest", - "default_version": "v1", - "distribution_name": "google-cloud-datacatalog-lineage-configmanagement", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1530027", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-datacatalog-lineage-configmanagement", - "name_pretty": "Data Lineage API", - "product_documentation": "https://cloud.google.com/dataplex/docs/about-data-lineage", - 
"release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_id": "datalineage.googleapis.com", + "api_shortname": "datalineage", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-datacatalog-lineage-configmanagement/latest", + "default_version": "v1", + "distribution_name": "google-cloud-datacatalog-lineage-configmanagement", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1530027", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-datacatalog-lineage-configmanagement", + "name_pretty": "Data Lineage API", + "product_documentation": "https://cloud.google.com/dataplex/docs/about-data-lineage", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/README.rst b/packages/google-cloud-datacatalog-lineage-configmanagement/README.rst index 94302eb90a62..09af4e1e4359 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/README.rst +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/README.rst @@ -1,14 +1,14 @@ Python Client for Data Lineage API ================================== -|preview| |pypi| |versions| +|stable| |pypi| |versions| `Data Lineage API`_: - `Client Library Documentation`_ - `Product Documentation`_ -.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datacatalog-lineage-configmanagement.svg :target: https://pypi.org/project/google-cloud-datacatalog-lineage-configmanagement/ @@ -61,14 +61,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. 
-Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/docs/README.rst b/packages/google-cloud-datacatalog-lineage-configmanagement/docs/README.rst index 94302eb90a62..09af4e1e4359 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/docs/README.rst +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/docs/README.rst @@ -1,14 +1,14 @@ Python Client for Data Lineage API ================================== -|preview| |pypi| |versions| +|stable| |pypi| |versions| `Data Lineage API`_: - `Client Library Documentation`_ - `Product Documentation`_ -.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datacatalog-lineage-configmanagement.svg :target: https://pypi.org/project/google-cloud-datacatalog-lineage-configmanagement/ @@ -61,14 +61,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/async_client.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/async_client.py index c910d3430f24..c9e8f5c28f65 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/async_client.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/async_client.py @@ -202,7 +202,7 @@ def transport(self) -> ConfigManagementServiceTransport: return self._client.transport @property - def api_endpoint(self): + def api_endpoint(self) -> str: """Return the API endpoint used by the client instance. Returns: @@ -546,7 +546,7 @@ async def sample_update_config(): async def list_operations( self, - request: Optional[operations_pb2.ListOperationsRequest] = None, + request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -572,8 +572,12 @@ async def list_operations( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) + if request is None: + request_pb = operations_pb2.ListOperationsRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.ListOperationsRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -582,7 +586,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -590,7 +594,7 @@ async def list_operations( # Send the request. response = await rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -601,7 +605,7 @@ async def list_operations( async def get_operation( self, - request: Optional[operations_pb2.GetOperationRequest] = None, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -627,8 +631,12 @@ async def get_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -637,7 +645,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -645,7 +653,7 @@ async def get_operation( # Send the request. response = await rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -656,7 +664,7 @@ async def get_operation( async def delete_operation( self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, + request: Optional[Union[operations_pb2.DeleteOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -686,8 +694,12 @@ async def delete_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) + if request is None: + request_pb = operations_pb2.DeleteOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.DeleteOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -696,7 +708,7 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -704,7 +716,7 @@ async def delete_operation( # Send the request. 
await rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -712,7 +724,7 @@ async def delete_operation( async def cancel_operation( self, - request: Optional[operations_pb2.CancelOperationRequest] = None, + request: Optional[Union[operations_pb2.CancelOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -741,8 +753,12 @@ async def cancel_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) + if request is None: + request_pb = operations_pb2.CancelOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.CancelOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -751,7 +767,7 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -759,7 +775,7 @@ async def cancel_operation( # Send the request. 
await rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/client.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/client.py index aa40674ce1a4..30da20bb6e6f 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/client.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/client.py @@ -116,7 +116,7 @@ class ConfigManagementServiceClient(metaclass=ConfigManagementServiceClientMeta) """ @staticmethod - def _get_default_mtls_endpoint(api_endpoint): + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: """Converts api endpoint to mTLS endpoint. Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to @@ -124,7 +124,7 @@ def _get_default_mtls_endpoint(api_endpoint): Args: api_endpoint (Optional[str]): the api endpoint to convert. Returns: - str: converted mTLS api endpoint. + Optional[str]: converted mTLS api endpoint. """ if not api_endpoint: return api_endpoint @@ -134,6 +134,10 @@ def _get_default_mtls_endpoint(api_endpoint): ) m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint @@ -438,7 +442,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): @staticmethod def _get_api_endpoint( api_override, client_cert_source, universe_domain, use_mtls_endpoint - ): + ) -> str: """Return the API endpoint used by the client. 
Args: @@ -537,7 +541,7 @@ def _add_cred_info_for_auth_errors( error._details.append(json.dumps(cred_info)) @property - def api_endpoint(self): + def api_endpoint(self) -> str: """Return the API endpoint used by the client instance. Returns: @@ -639,7 +643,7 @@ def __init__( self._universe_domain = ConfigManagementServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -975,7 +979,7 @@ def __exit__(self, type, value, traceback): def list_operations( self, - request: Optional[operations_pb2.ListOperationsRequest] = None, + request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1001,8 +1005,12 @@ def list_operations( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) + if request is None: + request_pb = operations_pb2.ListOperationsRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.ListOperationsRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1011,7 +1019,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1020,7 +1028,7 @@ def list_operations( try: # Send the request. 
response = rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -1034,7 +1042,7 @@ def list_operations( def get_operation( self, - request: Optional[operations_pb2.GetOperationRequest] = None, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1060,8 +1068,12 @@ def get_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1070,7 +1082,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1079,7 +1091,7 @@ def get_operation( try: # Send the request. response = rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -1093,7 +1105,7 @@ def get_operation( def delete_operation( self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, + request: Optional[Union[operations_pb2.DeleteOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1123,8 +1135,12 @@ def delete_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) + if request is None: + request_pb = operations_pb2.DeleteOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.DeleteOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1133,7 +1149,7 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1141,7 +1157,7 @@ def delete_operation( # Send the request. rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, @@ -1149,7 +1165,7 @@ def delete_operation( def cancel_operation( self, - request: Optional[operations_pb2.CancelOperationRequest] = None, + request: Optional[Union[operations_pb2.CancelOperationRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -1178,8 +1194,12 @@ def cancel_operation( # Create or coerce a protobuf request object. # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) + if request is None: + request_pb = operations_pb2.CancelOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.CancelOperationRequest(**request) + else: + request_pb = request # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1188,7 +1208,7 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1196,7 +1216,7 @@ def cancel_operation( # Send the request. rpc( - request, + request_pb, retry=retry, timeout=timeout, metadata=metadata, diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/README.rst b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/README.rst index 67c45d526844..a8cce1db64bf 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/README.rst +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/README.rst @@ -2,8 +2,9 @@ transport inheritance structure _______________________________ -`ConfigManagementServiceTransport` is the ABC for all transports. -- public child `ConfigManagementServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ConfigManagementServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseConfigManagementServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ConfigManagementServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). +``ConfigManagementServiceTransport`` is the ABC for all transports. + +- public child ``ConfigManagementServiceGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). 
+- public child ``ConfigManagementServiceGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseConfigManagementServiceRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``ConfigManagementServiceRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/base.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/base.py index f230c6e28d84..f428a34c8e0e 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/base.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/base.py @@ -83,6 +83,10 @@ def __init__( your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Save the scopes. 
@@ -132,6 +136,8 @@ def __init__( host += ":443" self._host = host + self._wrapped_methods: Dict[Callable, Callable] = {} + @property def host(self): return self._host diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc.py index e4245abd5f45..09b727373ac7 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc.py @@ -55,7 +55,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value @@ -90,7 +90,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(result, google.protobuf.message.Message): response_payload = MessageToJson(result) else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" grpc_response = { "payload": response_payload, "metadata": metadata, @@ -188,6 +188,10 @@ def __init__( your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. 
+ api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc_asyncio.py index 2cdd7e3baea3..dfeb133f7e84 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/grpc_asyncio.py @@ -61,7 +61,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value @@ -96,7 +96,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request elif isinstance(result, google.protobuf.message.Message): response_payload = MessageToJson(result) else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" grpc_response = { "payload": response_payload, "metadata": metadata, @@ -239,6 +239,10 @@ def __init__( your 
own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/rest.py b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/rest.py index 0bbeabccc2d7..a31fc6b88506 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/rest.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/google/cloud/datacatalog_lineage_configmanagement_v1/services/config_management_service/transports/rest.py @@ -357,6 +357,12 @@ def __init__( url_scheme: the protocol scheme for the API endpoint. Normally "https", but for testing or local servers, "http" can be specified. + interceptor (Optional[ConfigManagementServiceRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/noxfile.py b/packages/google-cloud-datacatalog-lineage-configmanagement/noxfile.py index 637600aa7431..263b897c4d05 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/noxfile.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/noxfile.py @@ -31,8 +31,6 @@ LINT_PATHS.append("samples") ALL_PYTHON = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", @@ -95,8 +93,9 @@ @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2579): + # use the latest version of mypy session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy "mypy<1.16.0", "types-requests", "types-protobuf", @@ -106,6 +105,8 @@ def mypy(session): "mypy", "-p", "google", + "--check-untyped-defs", + *session.posargs, ) @@ -251,32 +252,16 @@ def install_unittest_dependencies(session, *constraints): @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - ["python", "upb", "cpp"], + ["python", "upb"], ) def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ( - "3.11", - "3.12", - "3.13", - "3.14", - ): - session.skip("cpp implementation is not supported in python 3.11+") - constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - # Run py.test against the unit tests. session.run( "py.test", @@ -297,7 +282,10 @@ def unit(session, protobuf_implementation): def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") + if session.python >= "3.12": + session.install("--pre", "grpcio>=1.75.1") + else: + session.install("--pre", "grpcio<=1.62.2") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -464,7 +452,7 @@ def docfx(session): @nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", - ["python", "upb", "cpp"], + ["python", "upb"], ) def prerelease_deps(session, protobuf_implementation): """ @@ -474,16 +462,6 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ( - "3.11", - "3.12", - "3.13", - "3.14", - ): - session.skip("cpp implementation is not supported in python 3.11+") - # Install all dependencies session.install("-e", ".") @@ -527,7 +505,7 @@ def prerelease_deps(session, protobuf_implementation): "google-api-core", "google-auth", "grpc-google-iam-v1", - "grpcio", + "grpcio>=1.75.1" if session.python >= "3.12" else "grpcio<=1.62.2", "grpcio-status", "protobuf", "proto-plus", @@ -622,7 +600,7 @@ def core_deps_from_source(session, protobuf_implementation): core_dependencies_from_source = [ "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", "google-api-core @ git+https://github.com/googleapis/google-cloud-python#egg=google-api-core&subdirectory=packages/google-api-core", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "google-auth @ 
git+https://github.com/googleapis/google-cloud-python#egg=google-auth&subdirectory=packages/google-auth", "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", "proto-plus @ git+https://github.com/googleapis/google-cloud-python#egg=proto-plus&subdirectory=packages/proto-plus", ] diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/setup.py b/packages/google-cloud-datacatalog-lineage-configmanagement/setup.py index 0c5ae25fe482..3a32ce1deee0 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/setup.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/setup.py @@ -44,7 +44,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 2.11.0, <3.0.0", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", @@ -52,7 +52,7 @@ "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf >= 4.25.8, < 8.0.0", ] extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage-configmanagement" @@ -84,8 +84,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -97,7 +95,7 @@ ], platforms="Posix; MacOS X; Windows", 
packages=packages, - python_requires=">=3.7", + python_requires=">=3.9", install_requires=dependencies, extras_require=extras, include_package_data=True, diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.7.txt b/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.7.txt deleted file mode 100644 index bbf88e9745ae..000000000000 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.7.txt +++ /dev/null @@ -1,14 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# cryptography is a direct dependency of google-auth -cryptography==38.0.3 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.8.txt b/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.8.txt deleted file mode 100644 index 7599dea499ed..000000000000 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.8.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -# cryptography is a direct dependency of google-auth -cryptography diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.9.txt b/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.9.txt index 7599dea499ed..ac3833d41b9a 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.9.txt +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/testing/constraints-3.9.txt @@ -1,10 +1,13 @@ # -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -# cryptography is a direct dependency of google-auth -cryptography +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file, +# pinning their versions to their lower bounds. 
+# For example, if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# then this file should have google-cloud-foo==1.14.0 +google-api-core==2.21.0 +google-auth==2.35.0 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==4.25.8 diff --git a/packages/google-cloud-datacatalog-lineage-configmanagement/tests/unit/gapic/datacatalog_lineage_configmanagement_v1/test_config_management_service.py b/packages/google-cloud-datacatalog-lineage-configmanagement/tests/unit/gapic/datacatalog_lineage_configmanagement_v1/test_config_management_service.py index 2a335dfc14d8..75b1f25e9a77 100644 --- a/packages/google-cloud-datacatalog-lineage-configmanagement/tests/unit/gapic/datacatalog_lineage_configmanagement_v1/test_config_management_service.py +++ b/packages/google-cloud-datacatalog-lineage-configmanagement/tests/unit/gapic/datacatalog_lineage_configmanagement_v1/test_config_management_service.py @@ -119,6 +119,7 @@ def test__get_default_mtls_endpoint(): sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" + custom_endpoint = ".custom" assert ConfigManagementServiceClient._get_default_mtls_endpoint(None) is None assert ( @@ -141,6 +142,10 @@ def test__get_default_mtls_endpoint(): ConfigManagementServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) + assert ( + ConfigManagementServiceClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) def test__read_environment_variables(): @@ -1346,11 +1351,13 @@ def test_config_management_service_client_create_channel_credentials_file( ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object(grpc_helpers, "create_channel") as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -2607,8 +2614,9 @@ def test_get_config_rest_bad_request(request_type=configmanagement.GetConfigRequ request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -2673,18 +2681,20 @@ def test_get_config_rest_interceptors(null_interceptor): ) client = ConfigManagementServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, "post_get_config" - ) as post, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, - "post_get_config_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, "pre_get_config" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, "post_get_config" + ) 
as post, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, + "post_get_config_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, "pre_get_config" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -2737,8 +2747,9 @@ def test_update_config_rest_bad_request( request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -2882,18 +2893,20 @@ def test_update_config_rest_interceptors(null_interceptor): ) client = ConfigManagementServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, "post_update_config" - ) as post, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, - "post_update_config_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.ConfigManagementServiceRestInterceptor, "pre_update_config" - ) as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, "post_update_config" + ) as post, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, + "post_update_config_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ConfigManagementServiceRestInterceptor, "pre_update_config" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() @@ -2948,8 +2961,9 @@ def test_cancel_operation_rest_bad_request( ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = Response() @@ -3010,8 +3024,9 @@ def test_delete_operation_rest_bad_request( ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = Response() @@ -3072,8 +3087,9 @@ def test_get_operation_rest_bad_request( ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = Response() @@ -3134,8 +3150,9 @@ def test_list_operations_rest_bad_request( ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), ): # Wrap the value into a proper Response obj response_value = Response() @@ -3288,11 +3305,14 @@ def test_config_management_service_base_transport(): def test_config_management_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.datacatalog_lineage_configmanagement_v1.services.config_management_service.transports.ConfigManagementServiceTransport._prep_wrapped_messages" - ) as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.datacatalog_lineage_configmanagement_v1.services.config_management_service.transports.ConfigManagementServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigManagementServiceTransport( @@ -3309,9 +3329,12 @@ def test_config_management_service_base_transport_with_credentials_file(): def test_config_management_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.datacatalog_lineage_configmanagement_v1.services.config_management_service.transports.ConfigManagementServiceTransport._prep_wrapped_messages" - ) as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.datacatalog_lineage_configmanagement_v1.services.config_management_service.transports.ConfigManagementServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigManagementServiceTransport() @@ -3385,11 +3408,12 @@ def test_config_management_service_transport_create_channel( ): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) @@ -3954,6 +3978,38 @@ async def test_delete_operation_from_dict_async(): call.assert_called() +def test_delete_operation_flattened(): + client = ConfigManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + client.delete_operation() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.DeleteOperationRequest() + + +@pytest.mark.asyncio +async def test_delete_operation_flattened_async(): + client = ConfigManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.DeleteOperationRequest() + + def test_cancel_operation(transport: str = "grpc"): client = ConfigManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4093,6 +4149,38 @@ async def test_cancel_operation_from_dict_async(): call.assert_called() +def test_cancel_operation_flattened(): + client = ConfigManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + client.cancel_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.CancelOperationRequest() + + +@pytest.mark.asyncio +async def test_cancel_operation_flattened_async(): + client = ConfigManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.CancelOperationRequest() + + def test_get_operation(transport: str = "grpc"): client = ConfigManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4238,6 +4326,40 @@ async def test_get_operation_from_dict_async(): call.assert_called() +def test_get_operation_flattened(): + client = ConfigManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + client.get_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + +@pytest.mark.asyncio +async def test_get_operation_flattened_async(): + client = ConfigManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + def test_list_operations(transport: str = "grpc"): client = ConfigManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4383,6 +4505,40 @@ async def test_list_operations_from_dict_async(): call.assert_called() +def test_list_operations_flattened(): + client = ConfigManagementServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.ListOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_operations_flattened_async(): + client = ConfigManagementServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.ListOperationsRequest() + + def test_transport_close_grpc(): client = ConfigManagementServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc" diff --git a/packages/google-cloud-datacatalog-lineage/.repo-metadata.json b/packages/google-cloud-datacatalog-lineage/.repo-metadata.json index d0626f8661e0..3846352d44e9 100644 --- a/packages/google-cloud-datacatalog-lineage/.repo-metadata.json +++ b/packages/google-cloud-datacatalog-lineage/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Data lineage is a Dataplex feature that lets you track how data moves through your systems: where it comes from, where it is passed to, and what transformations are applied to it.", - "api_id": "datalineage.googleapis.com", - "api_shortname": "lineage", - "client_documentation": "https://cloud.google.com/python/docs/reference/lineage/latest", - "default_version": "v1", - "distribution_name": "google-cloud-datacatalog-lineage", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "lineage", - "name_pretty": "Data Lineage API", - "product_documentation": "https://cloud.google.com/data-catalog/docs/concepts/about-data-lineage", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Data lineage is a Dataplex feature that lets you track how data moves through your systems: where it comes from, where it is passed to, and what transformations are applied to it.", + "api_id": "datalineage.googleapis.com", + "api_shortname": "lineage", + "client_documentation": "https://cloud.google.com/python/docs/reference/lineage/latest", + "default_version": "v1", + "distribution_name": "google-cloud-datacatalog-lineage", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + 
"library_type": "GAPIC_AUTO", + "name": "lineage", + "name_pretty": "Data Lineage API", + "product_documentation": "https://cloud.google.com/data-catalog/docs/concepts/about-data-lineage", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-datacatalog/.repo-metadata.json b/packages/google-cloud-datacatalog/.repo-metadata.json index e51b83b6f3e4..6fe6ebdf5b11 100644 --- a/packages/google-cloud-datacatalog/.repo-metadata.json +++ b/packages/google-cloud-datacatalog/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is a fully managed and highly scalable data discovery and metadata management service.", - "api_id": "datacatalog.googleapis.com", - "api_shortname": "datacatalog", - "client_documentation": "https://cloud.google.com/python/docs/reference/datacatalog/latest", - "default_version": "v1", - "distribution_name": "google-cloud-datacatalog", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "datacatalog", - "name_pretty": "Google Cloud Data Catalog", - "product_documentation": "https://cloud.google.com/data-catalog", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is a fully managed and highly scalable data discovery and metadata management service.", + "api_id": "datacatalog.googleapis.com", + "api_shortname": "datacatalog", + "client_documentation": "https://cloud.google.com/python/docs/reference/datacatalog/latest", + "default_version": "v1", + "distribution_name": "google-cloud-datacatalog", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=655468\u0026template=1284353", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "datacatalog", + "name_pretty": "Google Cloud Data Catalog", + "product_documentation": "https://cloud.google.com/data-catalog", + "release_level": "stable", + "repo": 
"googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataflow-client/.repo-metadata.json b/packages/google-cloud-dataflow-client/.repo-metadata.json index 0bce5cfb3035..51c55513abbc 100644 --- a/packages/google-cloud-dataflow-client/.repo-metadata.json +++ b/packages/google-cloud-dataflow-client/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Unified stream and batch data processing that's serverless, fast, and cost-effective.", - "api_id": "dataflow.googleapis.com", - "api_shortname": "dataflow", - "client_documentation": "https://cloud.google.com/python/docs/reference/dataflow/latest", - "default_version": "v1beta3", - "distribution_name": "google-cloud-dataflow-client", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dataflow", - "name_pretty": "Dataflow", - "product_documentation": "https://cloud.google.com/dataflow/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Unified stream and batch data processing that's serverless, fast, and cost-effective.", + "api_id": "dataflow.googleapis.com", + "api_shortname": "dataflow", + "client_documentation": "https://cloud.google.com/python/docs/reference/dataflow/latest", + "default_version": "v1beta3", + "distribution_name": "google-cloud-dataflow-client", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dataflow", + "name_pretty": "Dataflow", + "product_documentation": "https://cloud.google.com/dataflow/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataform/.repo-metadata.json b/packages/google-cloud-dataform/.repo-metadata.json index ee54cf95a77f..53d25c058578 100644 --- a/packages/google-cloud-dataform/.repo-metadata.json +++ b/packages/google-cloud-dataform/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "dataform.googleapis.com", - 
"api_shortname": "dataform", - "client_documentation": "https://cloud.google.com/python/docs/reference/dataform/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-dataform", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dataform", - "name_pretty": "Cloud Dataform", - "product_documentation": "https://cloud.google.com", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Service to develop, version control, and operationalize SQL pipelines in\nBigQuery.", + "api_id": "dataform.googleapis.com", + "api_shortname": "dataform", + "client_documentation": "https://cloud.google.com/python/docs/reference/dataform/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-dataform", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=994183", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dataform", + "name_pretty": "Cloud Dataform", + "product_documentation": "https://cloud.google.com", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataform/README.rst b/packages/google-cloud-dataform/README.rst index e928f612700a..1d81c1f8975e 100644 --- a/packages/google-cloud-dataform/README.rst +++ b/packages/google-cloud-dataform/README.rst @@ -3,7 +3,8 @@ Python Client for Cloud Dataform |preview| |pypi| |versions| -`Cloud Dataform`_: +`Cloud Dataform`_: Service to develop, version control, and operationalize SQL pipelines in +BigQuery. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-dataform/docs/README.rst b/packages/google-cloud-dataform/docs/README.rst index e928f612700a..1d81c1f8975e 100644 --- a/packages/google-cloud-dataform/docs/README.rst +++ b/packages/google-cloud-dataform/docs/README.rst @@ -3,7 +3,8 @@ Python Client for Cloud Dataform |preview| |pypi| |versions| -`Cloud Dataform`_: +`Cloud Dataform`_: Service to develop, version control, and operationalize SQL pipelines in +BigQuery. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-datalabeling/.repo-metadata.json b/packages/google-cloud-datalabeling/.repo-metadata.json index bbb12b4861bd..363958f710ec 100644 --- a/packages/google-cloud-datalabeling/.repo-metadata.json +++ b/packages/google-cloud-datalabeling/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "is a service that lets you work with human labelers to generate highly accurate labels for a collection of data that you can use to train your machine learning models.", - "api_id": "datalabeling.googleapis.com", - "api_shortname": "datalabeling", - "client_documentation": "https://cloud.google.com/python/docs/reference/datalabeling/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-datalabeling", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "datalabeling", - "name_pretty": "Google Cloud Data Labeling", - "product_documentation": "https://cloud.google.com/data-labeling/docs/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is a service that lets you work with human labelers to generate highly accurate labels for a collection of data that you can use to train your machine learning models.", + "api_id": "datalabeling.googleapis.com", + "api_shortname": "datalabeling", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/datalabeling/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-datalabeling", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "datalabeling", + "name_pretty": "Google Cloud Data Labeling", + "product_documentation": "https://cloud.google.com/data-labeling/docs/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataplex/.repo-metadata.json b/packages/google-cloud-dataplex/.repo-metadata.json index 6b35a4614c00..7d367f47c2a0 100644 --- a/packages/google-cloud-dataplex/.repo-metadata.json +++ b/packages/google-cloud-dataplex/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "provides intelligent data fabric that enables organizations to centrally manage, monitor, and govern their data across data lakes, data warehouses, and data marts with consistent controls, providing access to trusted data and powering analytics at scale.", - "api_id": "dataplex.googleapis.com", - "api_shortname": "dataplex", - "client_documentation": "https://cloud.google.com/python/docs/reference/dataplex/latest", - "default_version": "v1", - "distribution_name": "google-cloud-dataplex", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dataplex", - "name_pretty": "Cloud Dataplex", - "product_documentation": "https://cloud.google.com/dataplex", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides intelligent data fabric that enables organizations to centrally manage, monitor, and govern their data across data lakes, data warehouses, and data marts with consistent controls, providing access to trusted data and powering analytics at scale.", + "api_id": "dataplex.googleapis.com", + "api_shortname": "dataplex", + "client_documentation": "https://cloud.google.com/python/docs/reference/dataplex/latest", + 
"default_version": "v1", + "distribution_name": "google-cloud-dataplex", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1155079\u0026template=1656695", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dataplex", + "name_pretty": "Cloud Dataplex", + "product_documentation": "https://cloud.google.com/dataplex", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataproc-metastore/.repo-metadata.json b/packages/google-cloud-dataproc-metastore/.repo-metadata.json index 9aebdf83137d..d205409a2282 100644 --- a/packages/google-cloud-dataproc-metastore/.repo-metadata.json +++ b/packages/google-cloud-dataproc-metastore/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a fully managed, highly available, autoscaled, autohealing, OSS-native metastore service that greatly simplifies technical metadata management. Dataproc Metastore service is based on Apache Hive metastore and serves as a critical component towards enterprise data lakes.", - "api_id": "metastore.googleapis.com", - "api_shortname": "metastore", - "client_documentation": "https://cloud.google.com/python/docs/reference/metastore/latest", - "default_version": "v1", - "distribution_name": "google-cloud-dataproc-metastore", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "metastore", - "name_pretty": "Dataproc Metastore", - "product_documentation": "https://cloud.google.com/dataproc-metastore/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a fully managed, highly available, autoscaled, autohealing, OSS-native metastore service that greatly simplifies technical metadata management. 
Dataproc Metastore service is based on Apache Hive metastore and serves as a critical component towards enterprise data lakes.", + "api_id": "metastore.googleapis.com", + "api_shortname": "metastore", + "client_documentation": "https://cloud.google.com/python/docs/reference/metastore/latest", + "default_version": "v1", + "distribution_name": "google-cloud-dataproc-metastore", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "metastore", + "name_pretty": "Dataproc Metastore", + "product_documentation": "https://cloud.google.com/dataproc-metastore/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dataproc/.repo-metadata.json b/packages/google-cloud-dataproc/.repo-metadata.json index 541c433add08..f4293203a2fd 100644 --- a/packages/google-cloud-dataproc/.repo-metadata.json +++ b/packages/google-cloud-dataproc/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is a faster, easier, more cost-effective way to run Apache Spark and Apache Hadoop.", - "api_id": "dataproc.googleapis.com", - "api_shortname": "dataproc", - "client_documentation": "https://cloud.google.com/python/docs/reference/dataproc/latest", - "default_version": "v1", - "distribution_name": "google-cloud-dataproc", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559745", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dataproc", - "name_pretty": "Google Cloud Dataproc", - "product_documentation": "https://cloud.google.com/dataproc", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is a faster, easier, more cost-effective way to run Apache Spark and Apache Hadoop.", + "api_id": "dataproc.googleapis.com", + "api_shortname": "dataproc", + "client_documentation": "https://cloud.google.com/python/docs/reference/dataproc/latest", + "default_version": "v1", + "distribution_name": 
"google-cloud-dataproc", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559745", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dataproc", + "name_pretty": "Google Cloud Dataproc", + "product_documentation": "https://cloud.google.com/dataproc", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-datastore/.repo-metadata.json b/packages/google-cloud-datastore/.repo-metadata.json index dfde98a922a5..904ca8f9efe8 100644 --- a/packages/google-cloud-datastore/.repo-metadata.json +++ b/packages/google-cloud-datastore/.repo-metadata.json @@ -1,17 +1,16 @@ { - "name": "datastore", - "name_pretty": "Google Cloud Datastore API", - "product_documentation": "https://cloud.google.com/datastore", + "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries.", + "api_id": "datastore.googleapis.com", + "api_shortname": "datastore", "client_documentation": "https://cloud.google.com/python/docs/reference/datastore/latest", + "default_version": "v1", + "distribution_name": "google-cloud-datastore", "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-datastore", - "api_id": "datastore.googleapis.com", - "default_version": "v1", - "codeowner_team": "@googleapis/cloud-native-db-dpes @googleapis/api-datastore-sdk @googleapis/api-firestore-partners", - "api_shortname": "datastore", - "api_description": "is a fully managed, schemaless database for\nstoring non-relational data. 
Cloud Datastore automatically scales with\nyour users and supports ACID transactions, high availability of reads and\nwrites, strong consistency for reads and ancestor queries, and eventual\nconsistency for all other queries." -} + "name": "datastore", + "name_pretty": "Google Cloud Datastore API", + "product_documentation": "https://cloud.google.com/datastore", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-datastream/.repo-metadata.json b/packages/google-cloud-datastream/.repo-metadata.json index ff8c0d440520..066b6b830b45 100644 --- a/packages/google-cloud-datastream/.repo-metadata.json +++ b/packages/google-cloud-datastream/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a serverless and easy-to-use change data capture (CDC) and replication service. It allows you to synchronize data across heterogeneous databases and applications reliably, and with minimal latency and downtime.", - "api_id": "datastream.googleapis.com", - "api_shortname": "datastream", - "client_documentation": "https://cloud.google.com/python/docs/reference/datastream/latest", - "default_version": "v1", - "distribution_name": "google-cloud-datastream", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "datastream", - "name_pretty": "Datastream", - "product_documentation": "https://cloud.google.com/datastream/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a serverless and easy-to-use change data capture (CDC) and replication service. 
It allows you to synchronize data across heterogeneous databases and applications reliably, and with minimal latency and downtime.", + "api_id": "datastream.googleapis.com", + "api_shortname": "datastream", + "client_documentation": "https://cloud.google.com/python/docs/reference/datastream/latest", + "default_version": "v1", + "distribution_name": "google-cloud-datastream", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "datastream", + "name_pretty": "Datastream", + "product_documentation": "https://cloud.google.com/datastream/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-deploy/.repo-metadata.json b/packages/google-cloud-deploy/.repo-metadata.json index f8536538e1bb..df3b6aa6070e 100644 --- a/packages/google-cloud-deploy/.repo-metadata.json +++ b/packages/google-cloud-deploy/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a service that automates delivery of your applications to a series of target environments in a defined sequence", - "api_id": "clouddeploy.googleapis.com", - "api_shortname": "clouddeploy", - "client_documentation": "https://cloud.google.com/python/docs/reference/clouddeploy/latest", - "default_version": "v1", - "distribution_name": "google-cloud-deploy", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "clouddeploy", - "name_pretty": "Google Cloud Deploy", - "product_documentation": "https://cloud.google.com/deploy/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a service that automates delivery of your applications to a series of target environments in a defined sequence", + "api_id": "clouddeploy.googleapis.com", + "api_shortname": "clouddeploy", + "client_documentation": "https://cloud.google.com/python/docs/reference/clouddeploy/latest", + "default_version": "v1", + "distribution_name": "google-cloud-deploy", + "language": 
"python", + "library_type": "GAPIC_AUTO", + "name": "clouddeploy", + "name_pretty": "Google Cloud Deploy", + "product_documentation": "https://cloud.google.com/deploy/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-developerconnect/.repo-metadata.json b/packages/google-cloud-developerconnect/.repo-metadata.json index 265c806ec3a8..9ec078f2fe69 100644 --- a/packages/google-cloud-developerconnect/.repo-metadata.json +++ b/packages/google-cloud-developerconnect/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Developer Connect streamlines integration with third-party source code management platforms by simplifying authentication, authorization, and networking configuration.", - "api_id": "developerconnect.googleapis.com", - "api_shortname": "developerconnect", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest", - "default_version": "v1", - "distribution_name": "google-cloud-developerconnect", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1446966&template=1822025", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-developerconnect", - "name_pretty": "Developer Connect API", - "product_documentation": "https://cloud.google.com/developer-connect/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Developer Connect streamlines integration with third-party source code management platforms by simplifying authentication, authorization, and networking configuration.", + "api_id": "developerconnect.googleapis.com", + "api_shortname": "developerconnect", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-developerconnect/latest", + "default_version": "v1", + "distribution_name": "google-cloud-developerconnect", + "issue_tracker": 
"https://issuetracker.google.com/issues/new?component=1446966\u0026template=1822025", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-developerconnect", + "name_pretty": "Developer Connect API", + "product_documentation": "https://cloud.google.com/developer-connect/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-devicestreaming/.repo-metadata.json b/packages/google-cloud-devicestreaming/.repo-metadata.json index 84b437a0b78b..d12559721932 100644 --- a/packages/google-cloud-devicestreaming/.repo-metadata.json +++ b/packages/google-cloud-devicestreaming/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Cloud API for device streaming usage.", - "api_id": "devicestreaming.googleapis.com", - "api_shortname": "devicestreaming", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-devicestreaming/latest", - "default_version": "v1", - "distribution_name": "google-cloud-devicestreaming", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1767292&template=2112325", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-devicestreaming", - "name_pretty": "Device Streaming API", - "product_documentation": "https://cloud.google.com/device-streaming/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Cloud API for device streaming usage.", + "api_id": "devicestreaming.googleapis.com", + "api_shortname": "devicestreaming", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-devicestreaming/latest", + "default_version": "v1", + "distribution_name": "google-cloud-devicestreaming", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1767292\u0026template=2112325", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": 
"google-cloud-devicestreaming", + "name_pretty": "Device Streaming API", + "product_documentation": "https://cloud.google.com/device-streaming/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dialogflow-cx/.repo-metadata.json b/packages/google-cloud-dialogflow-cx/.repo-metadata.json index 061e9e93eb7b..e7b8df5971ad 100644 --- a/packages/google-cloud-dialogflow-cx/.repo-metadata.json +++ b/packages/google-cloud-dialogflow-cx/.repo-metadata.json @@ -1,15 +1,16 @@ { - "api_id": "dialogflow.googleapis.com", - "api_shortname": "dialogflow", - "client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow-cx/latest", - "default_version": "v3", - "distribution_name": "google-cloud-dialogflow-cx", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5300385", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dialogflow-cx", - "name_pretty": "Dialogflow CX", - "product_documentation": "https://cloud.google.com/dialogflow/cx/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Builds conversational interfaces (for example, chatbots, and voice-powered\napps and devices).", + "api_id": "dialogflow.googleapis.com", + "api_shortname": "dialogflow", + "client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow-cx/latest", + "default_version": "v3", + "distribution_name": "google-cloud-dialogflow-cx", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5300385", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dialogflow-cx", + "name_pretty": "Dialogflow CX", + "product_documentation": "https://cloud.google.com/dialogflow/cx/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dialogflow-cx/README.rst 
b/packages/google-cloud-dialogflow-cx/README.rst index 7645aae0b3a3..bfad9bcb312e 100644 --- a/packages/google-cloud-dialogflow-cx/README.rst +++ b/packages/google-cloud-dialogflow-cx/README.rst @@ -3,7 +3,8 @@ Python Client for Dialogflow CX |stable| |pypi| |versions| -`Dialogflow CX`_: +`Dialogflow CX`_: Builds conversational interfaces (for example, chatbots, and voice-powered +apps and devices). - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-dialogflow-cx/docs/README.rst b/packages/google-cloud-dialogflow-cx/docs/README.rst index 7645aae0b3a3..bfad9bcb312e 100644 --- a/packages/google-cloud-dialogflow-cx/docs/README.rst +++ b/packages/google-cloud-dialogflow-cx/docs/README.rst @@ -3,7 +3,8 @@ Python Client for Dialogflow CX |stable| |pypi| |versions| -`Dialogflow CX`_: +`Dialogflow CX`_: Builds conversational interfaces (for example, chatbots, and voice-powered +apps and devices). - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-dialogflow/.repo-metadata.json b/packages/google-cloud-dialogflow/.repo-metadata.json index 3d48327fffc9..57cbe8f2a54a 100644 --- a/packages/google-cloud-dialogflow/.repo-metadata.json +++ b/packages/google-cloud-dialogflow/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is an end-to-end, build-once deploy-everywhere development suite for creating conversational interfaces for websites, mobile applications, popular messaging platforms, and IoT devices. You can use it to build interfaces (such as chatbots and conversational IVR) that enable natural and rich interactions between your users and your business. 
Dialogflow Enterprise Edition users have access to Google Cloud Support and a service level agreement (SLA) for production deployments.", - "api_id": "dialogflow.googleapis.com", - "api_shortname": "dialogflow", - "client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow/latest", - "default_version": "v2", - "distribution_name": "google-cloud-dialogflow", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5300385", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dialogflow", - "name_pretty": "Dialogflow", - "product_documentation": "https://www.dialogflow.com/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is an end-to-end, build-once deploy-everywhere development suite for creating conversational interfaces for websites, mobile applications, popular messaging platforms, and IoT devices. You can use it to build interfaces (such as chatbots and conversational IVR) that enable natural and rich interactions between your users and your business. 
Dialogflow Enterprise Edition users have access to Google Cloud Support and a service level agreement (SLA) for production deployments.", + "api_id": "dialogflow.googleapis.com", + "api_shortname": "dialogflow", + "client_documentation": "https://cloud.google.com/python/docs/reference/dialogflow/latest", + "default_version": "v2", + "distribution_name": "google-cloud-dialogflow", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5300385", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dialogflow", + "name_pretty": "Dialogflow", + "product_documentation": "https://www.dialogflow.com/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py index df8571957553..60ec39e82c6c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/__init__.py @@ -158,6 +158,8 @@ TelephonyDtmfEvents, VoiceSelectionParams, ) +from google.cloud.dialogflow_v2.types.ces_app import CesAppSpec +from google.cloud.dialogflow_v2.types.ces_tool import CesToolSpec from google.cloud.dialogflow_v2.types.context import ( Context, CreateContextRequest, @@ -186,6 +188,7 @@ ListMessagesRequest, ListMessagesResponse, SearchKnowledgeAnswer, + SearchKnowledgeDebugInfo, SearchKnowledgeRequest, SearchKnowledgeResponse, SuggestConversationSummaryRequest, @@ -382,20 +385,24 @@ AudioInput, AutomatedAgentReply, CreateParticipantRequest, + DatastoreResponseReason, DialogflowAssistAnswer, DtmfParameters, FaqAnswer, GenerateSuggestionsResponse, GetParticipantRequest, + IngestedContextReferenceDebugInfo, InputTextConfig, IntentSuggestion, KnowledgeAssistAnswer, + KnowledgeAssistDebugInfo, ListParticipantsRequest, ListParticipantsResponse, Message, MessageAnnotation, OutputAudio, Participant, + 
ServiceLatency, SmartReplyAnswer, StreamingAnalyzeContentRequest, StreamingAnalyzeContentResponse, @@ -456,6 +463,7 @@ UpdateToolRequest, ) from google.cloud.dialogflow_v2.types.tool_call import ToolCall, ToolCallResult +from google.cloud.dialogflow_v2.types.toolset import ToolsetTool from google.cloud.dialogflow_v2.types.validation_result import ( ValidationError, ValidationResult, @@ -554,6 +562,8 @@ "SpeechModelVariant", "SsmlVoiceGender", "TelephonyDtmf", + "CesAppSpec", + "CesToolSpec", "Context", "CreateContextRequest", "DeleteAllContextsRequest", @@ -579,6 +589,7 @@ "ListMessagesRequest", "ListMessagesResponse", "SearchKnowledgeAnswer", + "SearchKnowledgeDebugInfo", "SearchKnowledgeRequest", "SearchKnowledgeResponse", "SuggestConversationSummaryRequest", @@ -751,15 +762,18 @@ "FaqAnswer", "GenerateSuggestionsResponse", "GetParticipantRequest", + "IngestedContextReferenceDebugInfo", "InputTextConfig", "IntentSuggestion", "KnowledgeAssistAnswer", + "KnowledgeAssistDebugInfo", "ListParticipantsRequest", "ListParticipantsResponse", "Message", "MessageAnnotation", "OutputAudio", "Participant", + "ServiceLatency", "SmartReplyAnswer", "StreamingAnalyzeContentRequest", "StreamingAnalyzeContentResponse", @@ -774,6 +788,7 @@ "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "UpdateParticipantRequest", + "DatastoreResponseReason", "CloudConversationDebuggingInfo", "DetectIntentRequest", "DetectIntentResponse", @@ -812,6 +827,7 @@ "UpdateToolRequest", "ToolCall", "ToolCallResult", + "ToolsetTool", "ValidationError", "ValidationResult", "CreateVersionRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py index 831da88affb4..746bb025627e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/__init__.py @@ -109,6 +109,8 @@ TelephonyDtmfEvents, 
VoiceSelectionParams, ) +from .types.ces_app import CesAppSpec +from .types.ces_tool import CesToolSpec from .types.context import ( Context, CreateContextRequest, @@ -137,6 +139,7 @@ ListMessagesRequest, ListMessagesResponse, SearchKnowledgeAnswer, + SearchKnowledgeDebugInfo, SearchKnowledgeRequest, SearchKnowledgeResponse, SuggestConversationSummaryRequest, @@ -329,20 +332,24 @@ AudioInput, AutomatedAgentReply, CreateParticipantRequest, + DatastoreResponseReason, DialogflowAssistAnswer, DtmfParameters, FaqAnswer, GenerateSuggestionsResponse, GetParticipantRequest, + IngestedContextReferenceDebugInfo, InputTextConfig, IntentSuggestion, KnowledgeAssistAnswer, + KnowledgeAssistDebugInfo, ListParticipantsRequest, ListParticipantsResponse, Message, MessageAnnotation, OutputAudio, Participant, + ServiceLatency, SmartReplyAnswer, StreamingAnalyzeContentRequest, StreamingAnalyzeContentResponse, @@ -403,6 +410,7 @@ UpdateToolRequest, ) from .types.tool_call import ToolCall, ToolCallResult +from .types.toolset import ToolsetTool from .types.validation_result import ValidationError, ValidationResult from .types.version import ( CreateVersionRequest, @@ -561,6 +569,8 @@ def _get_version(dependency_name): "BatchUpdateEntityTypesResponse", "BatchUpdateIntentsRequest", "BatchUpdateIntentsResponse", + "CesAppSpec", + "CesToolSpec", "ClearSuggestionFeatureConfigOperationMetadata", "ClearSuggestionFeatureConfigRequest", "CloudConversationDebuggingInfo", @@ -603,6 +613,7 @@ def _get_version(dependency_name): "CreateToolRequest", "CreateVersionRequest", "CustomPronunciationParams", + "DatastoreResponseReason", "DeleteAgentRequest", "DeleteAllContextsRequest", "DeleteContextRequest", @@ -702,6 +713,7 @@ def _get_version(dependency_name): "InferenceParameter", "IngestContextReferencesRequest", "IngestContextReferencesResponse", + "IngestedContextReferenceDebugInfo", "InitializeEncryptionSpecMetadata", "InitializeEncryptionSpecRequest", "InitializeEncryptionSpecResponse", @@ -715,6 
+727,7 @@ def _get_version(dependency_name): "IntentView", "IntentsClient", "KnowledgeAssistAnswer", + "KnowledgeAssistDebugInfo", "KnowledgeBase", "KnowledgeBasesClient", "KnowledgeOperationMetadata", @@ -778,11 +791,13 @@ def _get_version(dependency_name): "SearchAgentsRequest", "SearchAgentsResponse", "SearchKnowledgeAnswer", + "SearchKnowledgeDebugInfo", "SearchKnowledgeRequest", "SearchKnowledgeResponse", "Sentiment", "SentimentAnalysisRequestConfig", "SentimentAnalysisResult", + "ServiceLatency", "SessionEntityType", "SessionEntityTypesClient", "SessionsClient", @@ -832,6 +847,7 @@ def _get_version(dependency_name): "ToolCall", "ToolCallResult", "ToolsClient", + "ToolsetTool", "TrainAgentRequest", "TriggerEvent", "UndeployConversationModelOperationMetadata", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py index 3af188307ef1..ca4a053584c3 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/async_client.py @@ -84,12 +84,18 @@ class AnswerRecordsAsyncClient: parse_answer_record_path = staticmethod( AnswerRecordsClient.parse_answer_record_path ) + app_path = staticmethod(AnswerRecordsClient.app_path) + parse_app_path = staticmethod(AnswerRecordsClient.parse_app_path) context_path = staticmethod(AnswerRecordsClient.context_path) parse_context_path = staticmethod(AnswerRecordsClient.parse_context_path) intent_path = staticmethod(AnswerRecordsClient.intent_path) parse_intent_path = staticmethod(AnswerRecordsClient.parse_intent_path) tool_path = staticmethod(AnswerRecordsClient.tool_path) parse_tool_path = staticmethod(AnswerRecordsClient.parse_tool_path) + tool_path = staticmethod(AnswerRecordsClient.tool_path) + parse_tool_path = 
staticmethod(AnswerRecordsClient.parse_tool_path) + toolset_path = staticmethod(AnswerRecordsClient.toolset_path) + parse_toolset_path = staticmethod(AnswerRecordsClient.parse_toolset_path) common_billing_account_path = staticmethod( AnswerRecordsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py index 0c160ceb3b62..2e3adc655733 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py @@ -252,6 +252,28 @@ def parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def context_path( project: str, @@ -291,6 +313,30 @@ def parse_intent_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/agent/intents/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = 
re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -313,6 +359,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py index 177da090a198..57762d5eb10d 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/async_client.py @@ -49,7 +49,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2.services.conversations import pagers -from google.cloud.dialogflow_v2.types import conversation, generator, participant +from google.cloud.dialogflow_v2.types import ( + conversation, + conversation_profile, + generator, + participant, +) from google.cloud.dialogflow_v2.types import conversation as gcd_conversation from .client import ConversationsClient @@ -86,6 +91,8 @@ class ConversationsAsyncClient: parse_answer_record_path = staticmethod( 
ConversationsClient.parse_answer_record_path ) + app_path = staticmethod(ConversationsClient.app_path) + parse_app_path = staticmethod(ConversationsClient.parse_app_path) conversation_path = staticmethod(ConversationsClient.conversation_path) parse_conversation_path = staticmethod(ConversationsClient.parse_conversation_path) conversation_model_path = staticmethod(ConversationsClient.conversation_model_path) @@ -120,6 +127,10 @@ class ConversationsAsyncClient: parse_phrase_set_path = staticmethod(ConversationsClient.parse_phrase_set_path) tool_path = staticmethod(ConversationsClient.tool_path) parse_tool_path = staticmethod(ConversationsClient.parse_tool_path) + tool_path = staticmethod(ConversationsClient.tool_path) + parse_tool_path = staticmethod(ConversationsClient.parse_tool_path) + toolset_path = staticmethod(ConversationsClient.toolset_path) + parse_toolset_path = staticmethod(ConversationsClient.parse_toolset_path) common_billing_account_path = staticmethod( ConversationsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py index 659531046ae9..bed12d5bfabd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py @@ -66,7 +66,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2.services.conversations import pagers -from google.cloud.dialogflow_v2.types import conversation, generator, participant +from google.cloud.dialogflow_v2.types import ( + conversation, + conversation_profile, + generator, + participant, +) from google.cloud.dialogflow_v2.types import conversation as gcd_conversation from .transports.base import DEFAULT_CLIENT_INFO, ConversationsTransport @@ -267,6 +272,28 @@ def 
parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def conversation_path( project: str, @@ -505,6 +532,54 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git 
a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/async_client.py index d03e14c4f2e2..13dda1730559 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/async_client.py @@ -82,6 +82,8 @@ class GeneratorEvaluationsAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = GeneratorEvaluationsClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = GeneratorEvaluationsClient._DEFAULT_UNIVERSE + app_path = staticmethod(GeneratorEvaluationsClient.app_path) + parse_app_path = staticmethod(GeneratorEvaluationsClient.parse_app_path) generator_path = staticmethod(GeneratorEvaluationsClient.generator_path) parse_generator_path = staticmethod(GeneratorEvaluationsClient.parse_generator_path) generator_evaluation_path = staticmethod( @@ -92,6 +94,10 @@ class GeneratorEvaluationsAsyncClient: ) tool_path = staticmethod(GeneratorEvaluationsClient.tool_path) parse_tool_path = staticmethod(GeneratorEvaluationsClient.parse_tool_path) + tool_path = staticmethod(GeneratorEvaluationsClient.tool_path) + parse_tool_path = staticmethod(GeneratorEvaluationsClient.parse_tool_path) + toolset_path = staticmethod(GeneratorEvaluationsClient.toolset_path) + parse_toolset_path = staticmethod(GeneratorEvaluationsClient.parse_toolset_path) common_billing_account_path = staticmethod( GeneratorEvaluationsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/client.py index 294919aad191..66c347dcc523 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/client.py +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generator_evaluations/client.py @@ -235,6 +235,28 @@ def transport(self) -> GeneratorEvaluationsTransport: """ return self._transport + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def generator_path( project: str, @@ -281,6 +303,30 @@ def parse_generator_evaluation_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -303,6 +349,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + 
"""Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py index dd6fa36fe6d9..5e46084b6f57 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/async_client.py @@ -50,7 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2.services.generators import pagers -from google.cloud.dialogflow_v2.types import generator +from google.cloud.dialogflow_v2.types import ces_app, ces_tool, generator, toolset from google.cloud.dialogflow_v2.types import generator as gcd_generator from .client import GeneratorsClient @@ -84,10 +84,16 @@ class GeneratorsAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = GeneratorsClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = GeneratorsClient._DEFAULT_UNIVERSE + app_path = staticmethod(GeneratorsClient.app_path) + parse_app_path = staticmethod(GeneratorsClient.parse_app_path) generator_path = staticmethod(GeneratorsClient.generator_path) parse_generator_path = staticmethod(GeneratorsClient.parse_generator_path) tool_path = staticmethod(GeneratorsClient.tool_path) parse_tool_path = staticmethod(GeneratorsClient.parse_tool_path) + tool_path = staticmethod(GeneratorsClient.tool_path) + parse_tool_path = staticmethod(GeneratorsClient.parse_tool_path) + toolset_path = staticmethod(GeneratorsClient.toolset_path) + parse_toolset_path = staticmethod(GeneratorsClient.parse_toolset_path) common_billing_account_path = staticmethod( 
GeneratorsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py index 365855fd35c1..27a8720cb713 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py @@ -67,7 +67,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2.services.generators import pagers -from google.cloud.dialogflow_v2.types import generator +from google.cloud.dialogflow_v2.types import ces_app, ces_tool, generator, toolset from google.cloud.dialogflow_v2.types import generator as gcd_generator from .transports.base import DEFAULT_CLIENT_INFO, GeneratorsTransport @@ -237,6 +237,28 @@ def transport(self) -> GeneratorsTransport: """ return self._transport + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def generator_path( project: str, @@ -259,6 +281,30 @@ def parse_generator_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def 
parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -281,6 +327,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py index 508ada1daf04..3848ed3fd417 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/async_client.py @@ -85,6 +85,8 @@ class ParticipantsAsyncClient: answer_record_path = staticmethod(ParticipantsClient.answer_record_path) parse_answer_record_path = staticmethod(ParticipantsClient.parse_answer_record_path) + app_path = staticmethod(ParticipantsClient.app_path) + parse_app_path = staticmethod(ParticipantsClient.parse_app_path) context_path = staticmethod(ParticipantsClient.context_path) parse_context_path = staticmethod(ParticipantsClient.parse_context_path) intent_path = 
staticmethod(ParticipantsClient.intent_path) @@ -101,6 +103,10 @@ class ParticipantsAsyncClient: ) tool_path = staticmethod(ParticipantsClient.tool_path) parse_tool_path = staticmethod(ParticipantsClient.parse_tool_path) + tool_path = staticmethod(ParticipantsClient.tool_path) + parse_tool_path = staticmethod(ParticipantsClient.parse_tool_path) + toolset_path = staticmethod(ParticipantsClient.toolset_path) + parse_toolset_path = staticmethod(ParticipantsClient.parse_toolset_path) common_billing_account_path = staticmethod( ParticipantsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py index 46a274dbd63a..018b664750e2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py @@ -254,6 +254,28 @@ def parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def context_path( project: str, @@ -405,6 +427,54 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return 
"projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/__init__.py index 672bfeb257f6..e595297116fa 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/__init__.py @@ -55,6 +55,12 @@ TelephonyDtmfEvents, VoiceSelectionParams, ) +from .ces_app import ( + CesAppSpec, +) +from .ces_tool import ( + CesToolSpec, +) from .context import ( Context, CreateContextRequest, @@ -83,6 +89,7 @@ ListMessagesRequest, ListMessagesResponse, SearchKnowledgeAnswer, + SearchKnowledgeDebugInfo, SearchKnowledgeRequest, SearchKnowledgeResponse, SuggestConversationSummaryRequest, @@ -284,20 +291,24 @@ AudioInput, AutomatedAgentReply, CreateParticipantRequest, + DatastoreResponseReason, DialogflowAssistAnswer, DtmfParameters, 
FaqAnswer, GenerateSuggestionsResponse, GetParticipantRequest, + IngestedContextReferenceDebugInfo, InputTextConfig, IntentSuggestion, KnowledgeAssistAnswer, + KnowledgeAssistDebugInfo, ListParticipantsRequest, ListParticipantsResponse, Message, MessageAnnotation, OutputAudio, Participant, + ServiceLatency, SmartReplyAnswer, StreamingAnalyzeContentRequest, StreamingAnalyzeContentResponse, @@ -361,6 +372,9 @@ ToolCall, ToolCallResult, ) +from .toolset import ( + ToolsetTool, +) from .validation_result import ( ValidationError, ValidationResult, @@ -415,6 +429,8 @@ "SpeechModelVariant", "SsmlVoiceGender", "TelephonyDtmf", + "CesAppSpec", + "CesToolSpec", "Context", "CreateContextRequest", "DeleteAllContextsRequest", @@ -440,6 +456,7 @@ "ListMessagesRequest", "ListMessagesResponse", "SearchKnowledgeAnswer", + "SearchKnowledgeDebugInfo", "SearchKnowledgeRequest", "SearchKnowledgeResponse", "SuggestConversationSummaryRequest", @@ -612,15 +629,18 @@ "FaqAnswer", "GenerateSuggestionsResponse", "GetParticipantRequest", + "IngestedContextReferenceDebugInfo", "InputTextConfig", "IntentSuggestion", "KnowledgeAssistAnswer", + "KnowledgeAssistDebugInfo", "ListParticipantsRequest", "ListParticipantsResponse", "Message", "MessageAnnotation", "OutputAudio", "Participant", + "ServiceLatency", "SmartReplyAnswer", "StreamingAnalyzeContentRequest", "StreamingAnalyzeContentResponse", @@ -635,6 +655,7 @@ "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "UpdateParticipantRequest", + "DatastoreResponseReason", "CloudConversationDebuggingInfo", "DetectIntentRequest", "DetectIntentResponse", @@ -673,6 +694,7 @@ "UpdateToolRequest", "ToolCall", "ToolCallResult", + "ToolsetTool", "ValidationError", "ValidationResult", "CreateVersionRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_app.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_app.py new file mode 100644 index 000000000000..42eca4de90cb --- /dev/null +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_app.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2", + manifest={ + "CesAppSpec", + }, +) + + +class CesAppSpec(proto.Message): + r"""Spec of CES app that the generator can choose from. + + Attributes: + ces_app (str): + Optional. Format: + ``projects//locations//apps/``. + confirmation_requirement (google.cloud.dialogflow_v2.types.Tool.ConfirmationRequirement): + Optional. Indicates whether the app requires + human confirmation. 
+ """ + + ces_app: str = proto.Field( + proto.STRING, + number=1, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=2, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_tool.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_tool.py new file mode 100644 index 000000000000..4686978d8fcc --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/ces_tool.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2", + manifest={ + "CesToolSpec", + }, +) + + +class CesToolSpec(proto.Message): + r"""Spec of CES tool that the generator can choose from. + + Attributes: + ces_tool (str): + Optional. Format: + ``projects//locations//apps//tools/``. + confirmation_requirement (google.cloud.dialogflow_v2.types.Tool.ConfirmationRequirement): + Optional. Indicates whether the tool requires + human confirmation. 
+ """ + + ces_tool: str = proto.Field( + proto.STRING, + number=1, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=2, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py index ea07c9f178ca..d564dd2c3112 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation.py @@ -48,6 +48,7 @@ "GenerateStatelessSuggestionRequest", "GenerateStatelessSuggestionResponse", "SearchKnowledgeRequest", + "SearchKnowledgeDebugInfo", "SearchKnowledgeResponse", "SearchKnowledgeAnswer", "GenerateSuggestionsRequest", @@ -105,9 +106,18 @@ class Conversation(proto.Message): telephony_connection_info (google.cloud.dialogflow_v2.types.Conversation.TelephonyConnectionInfo): Output only. The telephony connection information. + initial_conversation_profile (google.cloud.dialogflow_v2.types.ConversationProfile): + Optional. Output only. The initial + conversation profile to be used to configure + this conversation, which is a copy of the + conversation profile config read at conversation + creation time. ingested_context_references (MutableMapping[str, google.cloud.dialogflow_v2.types.Conversation.ContextReference]): Output only. The context reference updates provided by external systems. + initial_generator_contexts (MutableMapping[str, google.cloud.dialogflow_v2.types.Conversation.GeneratorContext]): + Output only. A map with generator name as key + and generator context as value. """ class LifecycleState(proto.Enum): @@ -344,6 +354,48 @@ class ContentFormat(proto.Enum): message=timestamp_pb2.Timestamp, ) + class GeneratorContext(proto.Message): + r"""Represents the context of a generator. 
+ + Attributes: + generator_type (google.cloud.dialogflow_v2.types.Conversation.GeneratorContext.GeneratorType): + Output only. The type of the generator. + """ + + class GeneratorType(proto.Enum): + r"""The available generator types. + + Values: + GENERATOR_TYPE_UNSPECIFIED (0): + Unspecified generator type. + FREE_FORM (1): + Free form generator type. + AGENT_COACHING (2): + Agent coaching generator type. + SUMMARIZATION (3): + Summarization generator type. + TRANSLATION (4): + Translation generator type. + AGENT_FEEDBACK (5): + Agent feedback generator type. + CUSTOMER_MESSAGE_GENERATION (6): + Customer message generation generator type. + """ + + GENERATOR_TYPE_UNSPECIFIED = 0 + FREE_FORM = 1 + AGENT_COACHING = 2 + SUMMARIZATION = 3 + TRANSLATION = 4 + AGENT_FEEDBACK = 5 + CUSTOMER_MESSAGE_GENERATION = 6 + + generator_type: "Conversation.GeneratorContext.GeneratorType" = proto.Field( + proto.ENUM, + number=1, + enum="Conversation.GeneratorContext.GeneratorType", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -382,12 +434,25 @@ class ContentFormat(proto.Enum): number=10, message=TelephonyConnectionInfo, ) + initial_conversation_profile: gcd_conversation_profile.ConversationProfile = ( + proto.Field( + proto.MESSAGE, + number=15, + message=gcd_conversation_profile.ConversationProfile, + ) + ) ingested_context_references: MutableMapping[str, ContextReference] = proto.MapField( proto.STRING, proto.MESSAGE, number=17, message=ContextReference, ) + initial_generator_contexts: MutableMapping[str, GeneratorContext] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=18, + message=GeneratorContext, + ) class CreateConversationRequest(proto.Message): @@ -767,6 +832,9 @@ class Summary(proto.Message): sections. The key is the section's name and the value is the section's content. There is no specific format for the key or value. 
+ sorted_text_sections (MutableSequence[google.cloud.dialogflow_v2.types.SuggestConversationSummaryResponse.Summary.SummarySection]): + Same as text_sections, but in an order that is consistent + with the order of the sections in the generator. answer_record (str): The name of the answer record. Format: @@ -778,6 +846,25 @@ class Summary(proto.Message): was not used to generate this summary. """ + class SummarySection(proto.Message): + r"""A component of the generated summary. + + Attributes: + section (str): + Output only. Name of the section. + summary (str): + Output only. Summary text for the section. + """ + + section: str = proto.Field( + proto.STRING, + number=1, + ) + summary: str = proto.Field( + proto.STRING, + number=2, + ) + text: str = proto.Field( proto.STRING, number=1, @@ -787,6 +874,13 @@ class Summary(proto.Message): proto.STRING, number=4, ) + sorted_text_sections: MutableSequence[ + "SuggestConversationSummaryResponse.Summary.SummarySection" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="SuggestConversationSummaryResponse.Summary.SummarySection", + ) answer_record: str = proto.Field( proto.STRING, number=3, @@ -1478,6 +1572,75 @@ class FilterSpecs(proto.Message): ) +class SearchKnowledgeDebugInfo(proto.Message): + r"""Debug information related to SearchKnowledge feature. + + Attributes: + datastore_response_reason (google.cloud.dialogflow_v2.types.DatastoreResponseReason): + Response reason from datastore which + indicates data serving status or answer quality + degradation. + search_knowledge_behavior (google.cloud.dialogflow_v2.types.SearchKnowledgeDebugInfo.SearchKnowledgeBehavior): + Configured behaviors for SearchKnowledge. + ingested_context_reference_debug_info (google.cloud.dialogflow_v2.types.IngestedContextReferenceDebugInfo): + Information about parameters ingested for + search knowledge. + service_latency (google.cloud.dialogflow_v2.types.ServiceLatency): + The latency of the service. 
+ """ + + class SearchKnowledgeBehavior(proto.Message): + r"""Configured behaviors for SearchKnowledge. + + Attributes: + answer_generation_rewriter_on (bool): + Whether data store agent rewriter was turned + on for the request. + end_user_metadata_included (bool): + Whether end_user_metadata is included in the data store + agent call. + third_party_connector_allowed (bool): + This field indicates whether third party + connectors are enabled for the project. Note + that this field only indicates if the project is + allowlisted for connectors. + """ + + answer_generation_rewriter_on: bool = proto.Field( + proto.BOOL, + number=1, + ) + end_user_metadata_included: bool = proto.Field( + proto.BOOL, + number=2, + ) + third_party_connector_allowed: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_response_reason: participant.DatastoreResponseReason = proto.Field( + proto.ENUM, + number=1, + enum=participant.DatastoreResponseReason, + ) + search_knowledge_behavior: SearchKnowledgeBehavior = proto.Field( + proto.MESSAGE, + number=2, + message=SearchKnowledgeBehavior, + ) + ingested_context_reference_debug_info: participant.IngestedContextReferenceDebugInfo = proto.Field( + proto.MESSAGE, + number=3, + message=participant.IngestedContextReferenceDebugInfo, + ) + service_latency: participant.ServiceLatency = proto.Field( + proto.MESSAGE, + number=4, + message=participant.ServiceLatency, + ) + + class SearchKnowledgeResponse(proto.Message): r"""The response message for [Conversations.SearchKnowledge][google.cloud.dialogflow.v2.Conversations.SearchKnowledge]. @@ -1489,6 +1652,8 @@ class SearchKnowledgeResponse(proto.Message): confidence. rewritten_query (str): The rewritten query used to search knowledge. + search_knowledge_debug_info (google.cloud.dialogflow_v2.types.SearchKnowledgeDebugInfo): + Debug info for SearchKnowledge. 
""" answers: MutableSequence["SearchKnowledgeAnswer"] = proto.RepeatedField( @@ -1500,6 +1665,11 @@ class SearchKnowledgeResponse(proto.Message): proto.STRING, number=3, ) + search_knowledge_debug_info: "SearchKnowledgeDebugInfo" = proto.Field( + proto.MESSAGE, + number=4, + message="SearchKnowledgeDebugInfo", + ) class SearchKnowledgeAnswer(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py index 2440aa429235..d6cd9137557c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py @@ -990,6 +990,30 @@ class MessageAnalysisConfig(proto.Message): [ListMessagesResponse.messages.SentimentAnalysisResult][google.cloud.dialogflow.v2.ListMessagesResponse.messages] If Pub/Sub notification is configured, result will be in [ConversationEvent.new_message_payload.SentimentAnalysisResult][google.cloud.dialogflow.v2.ConversationEvent.new_message_payload]. + enable_sentiment_analysis_v3 (bool): + Optional. Enables sentiment analysis for audio input and + conversation messages. If unspecified, defaults to false. If + this flag is set to true, other 'enable_sentiment_analysis' + fields will be ignored. + + Sentiment analysis inspects user input and identifies the + prevailing subjective opinion, especially to determine a + user's attitude as positive, negative, or neutral. + https://cloud.google.com/natural-language/docs/basics#sentiment_analysis + For + [Participants.StreamingAnalyzeContent][google.cloud.dialogflow.v2.Participants.StreamingAnalyzeContent] + method, result will be in + [StreamingAnalyzeContentResponse.message.SentimentAnalysisResult][google.cloud.dialogflow.v2.StreamingAnalyzeContentResponse.message]. 
+ For + [Participants.AnalyzeContent][google.cloud.dialogflow.v2.Participants.AnalyzeContent] + method, result will be in + [AnalyzeContentResponse.message.SentimentAnalysisResult][google.cloud.dialogflow.v2.AnalyzeContentResponse.message] + For + [Conversations.ListMessages][google.cloud.dialogflow.v2.Conversations.ListMessages] + method, result will be in + [ListMessagesResponse.messages.SentimentAnalysisResult][google.cloud.dialogflow.v2.ListMessagesResponse.messages] + If Pub/Sub notification is configured, result will be in + [ConversationEvent.new_message_payload.SentimentAnalysisResult][google.cloud.dialogflow.v2.ConversationEvent.new_message_payload]. """ enable_entity_extraction: bool = proto.Field( @@ -1000,6 +1024,10 @@ class MessageAnalysisConfig(proto.Message): proto.BOOL, number=3, ) + enable_sentiment_analysis_v3: bool = proto.Field( + proto.BOOL, + number=5, + ) notification_config: "NotificationConfig" = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/generator.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/generator.py index d3ff39df4db1..318141defc28 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/generator.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/generator.py @@ -21,7 +21,12 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.dialogflow_v2.types import agent_coaching_instruction +from google.cloud.dialogflow_v2.types import ( + agent_coaching_instruction, + ces_app, + ces_tool, + toolset, +) from google.cloud.dialogflow_v2.types import tool_call as gcd_tool_call __protobuf__ = proto.module( @@ -675,6 +680,15 @@ class Generator(proto.Message): Optional. Configuration for suggestion deduping. This is only applicable to AI Coach feature. + toolset_tools (MutableSequence[google.cloud.dialogflow_v2.types.ToolsetTool]): + Optional. 
List of CES toolset specs that the + generator can choose from. + ces_tool_specs (MutableSequence[google.cloud.dialogflow_v2.types.CesToolSpec]): + Optional. List of CES tool specs that the + generator can choose from. + ces_app_specs (MutableSequence[google.cloud.dialogflow_v2.types.CesAppSpec]): + Optional. List of CES app specs that the + generator can choose from. """ name: str = proto.Field( @@ -737,6 +751,21 @@ class Generator(proto.Message): number=23, message="SuggestionDedupingConfig", ) + toolset_tools: MutableSequence[toolset.ToolsetTool] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message=toolset.ToolsetTool, + ) + ces_tool_specs: MutableSequence[ces_tool.CesToolSpec] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message=ces_tool.CesToolSpec, + ) + ces_app_specs: MutableSequence[ces_app.CesAppSpec] = proto.RepeatedField( + proto.MESSAGE, + number=29, + message=ces_app.CesAppSpec, + ) class FreeFormSuggestion(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py index d4cf47ffa434..17007e4941ab 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py @@ -29,6 +29,7 @@ __protobuf__ = proto.module( package="google.cloud.dialogflow.v2", manifest={ + "DatastoreResponseReason", "Participant", "Message", "CreateParticipantRequest", @@ -64,11 +65,57 @@ "AssistQueryParameters", "SuggestKnowledgeAssistRequest", "SuggestKnowledgeAssistResponse", + "IngestedContextReferenceDebugInfo", + "ServiceLatency", + "KnowledgeAssistDebugInfo", "KnowledgeAssistAnswer", }, ) +class DatastoreResponseReason(proto.Enum): + r"""Response reason from datastore which indicates data serving + status or answer quality degradation. + + Values: + DATASTORE_RESPONSE_REASON_UNSPECIFIED (0): + Default value. 
+ NONE (1): + No specific response reason from datastore. + SEARCH_OUT_OF_QUOTA (2): + Search is blocked due to out of quota. + SEARCH_EMPTY_RESULTS (3): + Search returns empty results. + ANSWER_GENERATION_GEN_AI_DISABLED (4): + Generative AI is disabled. + ANSWER_GENERATION_OUT_OF_QUOTA (5): + Answer generation is blocked due to out of + quota. + ANSWER_GENERATION_ERROR (6): + Answer generation encounters an error. + ANSWER_GENERATION_NOT_ENOUGH_INFO (7): + Answer generation does not have enough + information to generate answer. + ANSWER_GENERATION_RAI_FAILED (8): + Answer generation is blocked by RAI + (Responsible AI) failure. + ANSWER_GENERATION_NOT_GROUNDED (9): + Answer generation is not grounded on reliable + sources. + """ + + DATASTORE_RESPONSE_REASON_UNSPECIFIED = 0 + NONE = 1 + SEARCH_OUT_OF_QUOTA = 2 + SEARCH_EMPTY_RESULTS = 3 + ANSWER_GENERATION_GEN_AI_DISABLED = 4 + ANSWER_GENERATION_OUT_OF_QUOTA = 5 + ANSWER_GENERATION_ERROR = 6 + ANSWER_GENERATION_NOT_ENOUGH_INFO = 7 + ANSWER_GENERATION_RAI_FAILED = 8 + ANSWER_GENERATION_NOT_GROUNDED = 9 + + class Participant(proto.Message): r"""Represents a conversation participant (human agent, virtual agent, end-user). @@ -794,6 +841,11 @@ class StreamingAnalyzeContentRequest(proto.Message): only one final response even if some ``Fulfillment``\ s in Dialogflow CX agent have been configured to return partial responses. + output_multiple_utterances (bool): + Optional. If multiple uttereances are + detected in the audio stream, process them + individually instead of stitching them together + to form a single utterance. enable_debugging_info (bool): If true, ``StreamingAnalyzeContentResponse.debugging_info`` will get populated. 
@@ -859,6 +911,10 @@ class StreamingAnalyzeContentRequest(proto.Message): proto.BOOL, number=12, ) + output_multiple_utterances: bool = proto.Field( + proto.BOOL, + number=18, + ) enable_debugging_info: bool = proto.Field( proto.BOOL, number=19, @@ -1989,6 +2045,406 @@ class SuggestKnowledgeAssistResponse(proto.Message): ) +class IngestedContextReferenceDebugInfo(proto.Message): + r"""Debug information related to ingested context reference. + + Attributes: + project_not_allowlisted (bool): + Indicates if the project is allowlisted to + use ingested context reference. + context_reference_retrieved (bool): + The status of context_reference retrieval from database. + ingested_parameters_debug_info (MutableSequence[google.cloud.dialogflow_v2.types.IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo]): + Parameters ingested from the context + reference. + """ + + class IngestedParameterDebugInfo(proto.Message): + r"""Debug information related to ingested parameters from context + reference. + + Attributes: + parameter (str): + The name of the parameter in the context + reference. + ingestion_status (google.cloud.dialogflow_v2.types.IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus): + The ingestion status for this specific + parameter. + """ + + class IngestionStatus(proto.Enum): + r"""Enum representing the various states of parameter ingestion. + + Values: + INGESTION_STATUS_UNSPECIFIED (0): + Default value, indicates that the ingestion + status is not specified. + INGESTION_STATUS_SUCCEEDED (1): + Indicates that the parameter was successfully + ingested. + INGESTION_STATUS_CONTEXT_NOT_AVAILABLE (2): + Indicates that the parameter was not + available for ingestion. + INGESTION_STATUS_PARSE_FAILED (3): + Indicates that there was a failure parsing + the parameter content. 
+ INGESTION_STATUS_INVALID_ENTRY (4): + Indicates that the context reference had an + unexpected number of content entries as Context + reference should only have one entry. + INGESTION_STATUS_INVALID_FORMAT (5): + Indicates that the context reference content + was not in the expected format (e.g., JSON). + INGESTION_STATUS_LANGUAGE_MISMATCH (6): + Indicates that the context reference language + does not match the conversation language. + """ + + INGESTION_STATUS_UNSPECIFIED = 0 + INGESTION_STATUS_SUCCEEDED = 1 + INGESTION_STATUS_CONTEXT_NOT_AVAILABLE = 2 + INGESTION_STATUS_PARSE_FAILED = 3 + INGESTION_STATUS_INVALID_ENTRY = 4 + INGESTION_STATUS_INVALID_FORMAT = 5 + INGESTION_STATUS_LANGUAGE_MISMATCH = 6 + + parameter: str = proto.Field( + proto.STRING, + number=1, + ) + ingestion_status: "IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus" = proto.Field( + proto.ENUM, + number=2, + enum="IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus", + ) + + project_not_allowlisted: bool = proto.Field( + proto.BOOL, + number=1, + ) + context_reference_retrieved: bool = proto.Field( + proto.BOOL, + number=2, + ) + ingested_parameters_debug_info: MutableSequence[IngestedParameterDebugInfo] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=IngestedParameterDebugInfo, + ) + ) + + +class ServiceLatency(proto.Message): + r"""Message to represent the latency of the service. + + Attributes: + internal_service_latencies (MutableSequence[google.cloud.dialogflow_v2.types.ServiceLatency.InternalServiceLatency]): + A list of internal service latencies. + """ + + class InternalServiceLatency(proto.Message): + r"""Message to represent the latency of an internal service. + + Attributes: + step (str): + The name of the internal service. + latency_ms (float): + The latency of the internal service in + milliseconds. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of the internal service. 
+ complete_time (google.protobuf.timestamp_pb2.Timestamp): + The completion time of the internal service. + """ + + step: str = proto.Field( + proto.STRING, + number=1, + ) + latency_ms: float = proto.Field( + proto.FLOAT, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + internal_service_latencies: MutableSequence[InternalServiceLatency] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InternalServiceLatency, + ) + ) + + +class KnowledgeAssistDebugInfo(proto.Message): + r"""Debug information related to Knowledge Assist feature. + + Attributes: + query_generation_failure_reason (google.cloud.dialogflow_v2.types.KnowledgeAssistDebugInfo.QueryGenerationFailureReason): + Reason for query generation. + query_categorization_failure_reason (google.cloud.dialogflow_v2.types.KnowledgeAssistDebugInfo.QueryCategorizationFailureReason): + Reason for query categorization. + datastore_response_reason (google.cloud.dialogflow_v2.types.DatastoreResponseReason): + Response reason from datastore which + indicates data serving status or answer quality + degradation. + knowledge_assist_behavior (google.cloud.dialogflow_v2.types.KnowledgeAssistDebugInfo.KnowledgeAssistBehavior): + Configured behaviors for Knowedge Assist. + ingested_context_reference_debug_info (google.cloud.dialogflow_v2.types.IngestedContextReferenceDebugInfo): + Information about parameters ingested for + search knowledge. + service_latency (google.cloud.dialogflow_v2.types.ServiceLatency): + The latency of the service. + """ + + class QueryGenerationFailureReason(proto.Enum): + r"""Reason for query generation failure. + + Values: + QUERY_GENERATION_FAILURE_REASON_UNSPECIFIED (0): + Default value. + QUERY_GENERATION_OUT_OF_QUOTA (1): + Query generation is blocked due to out of + quota. 
+ QUERY_GENERATION_FAILED (2): + Call to Knowedge Assist query generation + model fails. + QUERY_GENERATION_NO_QUERY_GENERATED (3): + Query generation model decides that there is + no new topic change or there has been similar + queries generated in the previous turns. + QUERY_GENERATION_RAI_FAILED (4): + Knowedge Assist generated query is blocked by + RAI (Responsible AI). + NOT_IN_ALLOWLIST (5): + Query generation is blocked by Knowledge + Assist conversation profile level / agent id + level filtering. + QUERY_GENERATION_QUERY_REDACTED (6): + The generated query is blocked due to + redaction. + QUERY_GENERATION_LLM_RESPONSE_PARSE_FAILED (10): + Query generation failed due to LLM response + parse failure. + QUERY_GENERATION_EMPTY_CONVERSATION (11): + The conversation has no messages. + QUERY_GENERATION_EMPTY_LAST_MESSAGE (12): + The last message in the conversation is + empty. + QUERY_GENERATION_TRIGGERING_EVENT_CONDITION_NOT_MET (13): + The trigger event condition is not met. This occurs in the + following scenarios: + + 1. The trigger_event is CUSTOMER_MESSAGE or UNSPECIFIED, but + the last message is not from the customer. + 2. The trigger_event is AGENT_MESSAGE, but the last message + is not from the agent. + """ + + QUERY_GENERATION_FAILURE_REASON_UNSPECIFIED = 0 + QUERY_GENERATION_OUT_OF_QUOTA = 1 + QUERY_GENERATION_FAILED = 2 + QUERY_GENERATION_NO_QUERY_GENERATED = 3 + QUERY_GENERATION_RAI_FAILED = 4 + NOT_IN_ALLOWLIST = 5 + QUERY_GENERATION_QUERY_REDACTED = 6 + QUERY_GENERATION_LLM_RESPONSE_PARSE_FAILED = 10 + QUERY_GENERATION_EMPTY_CONVERSATION = 11 + QUERY_GENERATION_EMPTY_LAST_MESSAGE = 12 + QUERY_GENERATION_TRIGGERING_EVENT_CONDITION_NOT_MET = 13 + + class QueryCategorizationFailureReason(proto.Enum): + r"""Reason for query categorization failure. + + Values: + QUERY_CATEGORIZATION_FAILURE_REASON_UNSPECIFIED (0): + Default value. + QUERY_CATEGORIZATION_INVALID_CONFIG (1): + Vertex AI Search config supplied for query + categorization is invalid. 
+ QUERY_CATEGORIZATION_RESULT_NOT_FOUND (2): + Vertex AI Search result does not contain a + query categorization result. + QUERY_CATEGORIZATION_FAILED (3): + Vertex AI Search call fails. + """ + + QUERY_CATEGORIZATION_FAILURE_REASON_UNSPECIFIED = 0 + QUERY_CATEGORIZATION_INVALID_CONFIG = 1 + QUERY_CATEGORIZATION_RESULT_NOT_FOUND = 2 + QUERY_CATEGORIZATION_FAILED = 3 + + class KnowledgeAssistBehavior(proto.Message): + r"""Configured behaviors for Knowedge Assist. + + Attributes: + answer_generation_rewriter_on (bool): + Whether data store agent rewriter was turned + off for the request. + end_user_metadata_included (bool): + Whether end_user_metadata is included in the data store + agent call. + return_query_only (bool): + Whether customers configured to return query + only in the conversation profile. + use_pubsub_delivery (bool): + Whether customers configured to use pubsub to + deliver. + disable_sync_delivery (bool): + Whether customers configured to disable the + synchronous delivery of Knowedge Assist + response. + previous_queries_included (bool): + Whether previously suggested queries are + included in the query generation process. + use_translated_message (bool): + Translated message is included in query + generation process. + use_custom_safety_filter_level (bool): + Safety filter is adjusted by user. + conversation_transcript_has_mixed_languages (bool): + Conversation transcript has mixed languages. + query_generation_agent_language_mismatch (bool): + Whether the agent language from the + translation generator mismatches the end-user + language. + query_generation_end_user_language_mismatch (bool): + Whether the end-user language from the + translation generator mismatches the end-user + language. + third_party_connector_allowed (bool): + This field indicates whether third party + connectors are enabled for the project + multiple_queries_generated (bool): + Indicates that the query generation model + generated multiple queries. 
+ query_contained_search_context (bool): + Indicates that the generated query contains + search context. + invalid_items_query_suggestion_skipped (bool): + Indicates that invalid items were skipped + when parsing the LLM response. + primary_query_redacted_and_replaced (bool): + True if the primary suggested query was + redacted and replaced by an additional query. + appended_search_context_count (int): + The number of search contexts appended to the + query. + """ + + answer_generation_rewriter_on: bool = proto.Field( + proto.BOOL, + number=1, + ) + end_user_metadata_included: bool = proto.Field( + proto.BOOL, + number=2, + ) + return_query_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + use_pubsub_delivery: bool = proto.Field( + proto.BOOL, + number=5, + ) + disable_sync_delivery: bool = proto.Field( + proto.BOOL, + number=6, + ) + previous_queries_included: bool = proto.Field( + proto.BOOL, + number=7, + ) + use_translated_message: bool = proto.Field( + proto.BOOL, + number=8, + ) + use_custom_safety_filter_level: bool = proto.Field( + proto.BOOL, + number=9, + ) + conversation_transcript_has_mixed_languages: bool = proto.Field( + proto.BOOL, + number=10, + ) + query_generation_agent_language_mismatch: bool = proto.Field( + proto.BOOL, + number=11, + ) + query_generation_end_user_language_mismatch: bool = proto.Field( + proto.BOOL, + number=12, + ) + third_party_connector_allowed: bool = proto.Field( + proto.BOOL, + number=13, + ) + multiple_queries_generated: bool = proto.Field( + proto.BOOL, + number=14, + ) + query_contained_search_context: bool = proto.Field( + proto.BOOL, + number=15, + ) + invalid_items_query_suggestion_skipped: bool = proto.Field( + proto.BOOL, + number=16, + ) + primary_query_redacted_and_replaced: bool = proto.Field( + proto.BOOL, + number=17, + ) + appended_search_context_count: int = proto.Field( + proto.INT32, + number=18, + ) + + query_generation_failure_reason: QueryGenerationFailureReason = proto.Field( + proto.ENUM, + 
number=1, + enum=QueryGenerationFailureReason, + ) + query_categorization_failure_reason: QueryCategorizationFailureReason = proto.Field( + proto.ENUM, + number=2, + enum=QueryCategorizationFailureReason, + ) + datastore_response_reason: "DatastoreResponseReason" = proto.Field( + proto.ENUM, + number=3, + enum="DatastoreResponseReason", + ) + knowledge_assist_behavior: KnowledgeAssistBehavior = proto.Field( + proto.MESSAGE, + number=4, + message=KnowledgeAssistBehavior, + ) + ingested_context_reference_debug_info: "IngestedContextReferenceDebugInfo" = ( + proto.Field( + proto.MESSAGE, + number=5, + message="IngestedContextReferenceDebugInfo", + ) + ) + service_latency: "ServiceLatency" = proto.Field( + proto.MESSAGE, + number=6, + message="ServiceLatency", + ) + + class KnowledgeAssistAnswer(proto.Message): r"""Represents a Knowledge Assist answer. @@ -2004,6 +2460,9 @@ class KnowledgeAssistAnswer(proto.Message): answer_record (str): The name of the answer record. Format: ``projects//locations//answer Records/``. + knowledge_assist_debug_info (google.cloud.dialogflow_v2.types.KnowledgeAssistDebugInfo): + Debug information related to Knowledge Assist + feature. """ class SuggestedQuery(proto.Message): @@ -2139,6 +2598,11 @@ class Snippet(proto.Message): proto.STRING, number=3, ) + knowledge_assist_debug_info: "KnowledgeAssistDebugInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="KnowledgeAssistDebugInfo", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/tool_call.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/tool_call.py index 912568c6f8d0..d1edc4e4ad90 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/tool_call.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/tool_call.py @@ -34,6 +34,10 @@ class ToolCall(proto.Message): r"""Represents a call of a specific tool's action with the specified inputs. 
+ This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -43,6 +47,21 @@ class ToolCall(proto.Message): associated with this call. Format: ``projects//locations//tools/``. + This field is a member of `oneof`_ ``source``. + ces_tool (str): + Optional. CES tool name for this call. Format: + ``projects//locations//apps//tools/``. + + This field is a member of `oneof`_ ``source``. + ces_toolset (str): + Optional. CES toolset name for this call. Format: + ``projects//locations//apps//toolsets/ToolsetID>``. + + This field is a member of `oneof`_ ``source``. + ces_app (str): + Optional. CES app name for this call. Format: + ``projects//locations//apps/``. + This field is a member of `oneof`_ ``source``. tool_display_name (str): Optional. A human readable short name of the @@ -86,6 +105,21 @@ class State(proto.Enum): number=1, oneof="source", ) + ces_tool: str = proto.Field( + proto.STRING, + number=11, + oneof="source", + ) + ces_toolset: str = proto.Field( + proto.STRING, + number=12, + oneof="source", + ) + ces_app: str = proto.Field( + proto.STRING, + number=8, + oneof="source", + ) tool_display_name: str = proto.Field( proto.STRING, number=9, @@ -135,6 +169,21 @@ class ToolCallResult(proto.Message): associated with this call. Format: ``projects//locations//tools/``. + This field is a member of `oneof`_ ``source``. + ces_toolset (str): + Optional. CES toolset name for this call. Format: + ``projects//locations//apps//toolsets/ToolsetID>``. + + This field is a member of `oneof`_ ``source``. + ces_tool (str): + Optional. CES tool name for this call. Format: + ``projects//locations//apps//tools/``. + + This field is a member of `oneof`_ ``source``. + ces_app (str): + Optional. CES app name for this call. 
Format: + ``projects//locations//apps/``. + This field is a member of `oneof`_ ``source``. action (str): Optional. The name of the tool's action @@ -180,6 +229,21 @@ class Error(proto.Message): number=1, oneof="source", ) + ces_toolset: str = proto.Field( + proto.STRING, + number=13, + oneof="source", + ) + ces_tool: str = proto.Field( + proto.STRING, + number=12, + oneof="source", + ) + ces_app: str = proto.Field( + proto.STRING, + number=11, + oneof="source", + ) action: str = proto.Field( proto.STRING, number=2, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/toolset.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/toolset.py new file mode 100644 index 000000000000..1b5b51b4a66e --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/toolset.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2", + manifest={ + "ToolsetTool", + }, +) + + +class ToolsetTool(proto.Message): + r"""A tool that is created from a toolset. + + Attributes: + toolset (str): + Required. The name of the toolset to retrieve the schema + for. 
Format: + ``projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}`` + operation_id (str): + Optional. The operationId field of the + OpenAPI endpoint. The operationId must be + present in the toolset's definition. + confirmation_requirement (google.cloud.dialogflow_v2.types.Tool.ConfirmationRequirement): + Optional. Indicates whether the tool requires + human confirmation. + """ + + toolset: str = proto.Field( + proto.STRING, + number=1, + ) + operation_id: str = proto.Field( + proto.STRING, + number=2, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=3, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py index cd836430eaab..ec1eae574285 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py @@ -105,6 +105,8 @@ TelephonyDtmfEvents, VoiceSelectionParams, ) +from .types.ces_app import CesAppSpec +from .types.ces_tool import CesToolSpec from .types.context import ( Context, CreateContextRequest, @@ -136,6 +138,7 @@ ListMessagesRequest, ListMessagesResponse, SearchKnowledgeAnswer, + SearchKnowledgeDebugInfo, SearchKnowledgeRequest, SearchKnowledgeResponse, SuggestConversationSummaryRequest, @@ -290,15 +293,18 @@ CompileSuggestionRequest, CompileSuggestionResponse, CreateParticipantRequest, + DatastoreResponseReason, DialogflowAssistAnswer, DtmfParameters, FaqAnswer, GenerateSuggestionsResponse, GetParticipantRequest, + IngestedContextReferenceDebugInfo, InputTextConfig, IntentInput, IntentSuggestion, KnowledgeAssistAnswer, + KnowledgeAssistDebugInfo, ListParticipantsRequest, ListParticipantsResponse, ListSuggestionsRequest, @@ -308,6 +314,7 @@ OutputAudio, Participant, 
ResponseMessage, + ServiceLatency, SmartReplyAnswer, StreamingAnalyzeContentRequest, StreamingAnalyzeContentResponse, @@ -380,6 +387,7 @@ UpdateToolRequest, ) from .types.tool_call import ToolCall, ToolCallResult +from .types.toolset import ToolsetTool from .types.validation_result import ValidationError, ValidationResult from .types.version import ( CreateVersionRequest, @@ -541,6 +549,8 @@ def _get_version(dependency_name): "BatchUpdateIntentsResponse", "BidiStreamingAnalyzeContentRequest", "BidiStreamingAnalyzeContentResponse", + "CesAppSpec", + "CesToolSpec", "ClearSuggestionFeatureConfigOperationMetadata", "ClearSuggestionFeatureConfigRequest", "CloudConversationDebuggingInfo", @@ -574,6 +584,7 @@ def _get_version(dependency_name): "CreateToolRequest", "CreateVersionRequest", "CustomPronunciationParams", + "DatastoreResponseReason", "DeleteAgentRequest", "DeleteAllContextsRequest", "DeleteContextRequest", @@ -662,6 +673,7 @@ def _get_version(dependency_name): "InferenceParameter", "IngestContextReferencesRequest", "IngestContextReferencesResponse", + "IngestedContextReferenceDebugInfo", "InitializeEncryptionSpecMetadata", "InitializeEncryptionSpecRequest", "InitializeEncryptionSpecResponse", @@ -675,6 +687,7 @@ def _get_version(dependency_name): "IntentsClient", "KnowledgeAnswers", "KnowledgeAssistAnswer", + "KnowledgeAssistDebugInfo", "KnowledgeBase", "KnowledgeBasesClient", "KnowledgeOperationMetadata", @@ -739,11 +752,13 @@ def _get_version(dependency_name): "SearchAgentsRequest", "SearchAgentsResponse", "SearchKnowledgeAnswer", + "SearchKnowledgeDebugInfo", "SearchKnowledgeRequest", "SearchKnowledgeResponse", "Sentiment", "SentimentAnalysisRequestConfig", "SentimentAnalysisResult", + "ServiceLatency", "SessionEntityType", "SessionEntityTypesClient", "SessionsClient", @@ -794,6 +809,7 @@ def _get_version(dependency_name): "ToolCall", "ToolCallResult", "ToolsClient", + "ToolsetTool", "TrainAgentRequest", "TriggerEvent", "UndeletePhoneNumberRequest", diff 
--git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py index 43508ed49a9d..8fb49700ec10 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/async_client.py @@ -85,6 +85,8 @@ class AnswerRecordsAsyncClient: parse_answer_record_path = staticmethod( AnswerRecordsClient.parse_answer_record_path ) + app_path = staticmethod(AnswerRecordsClient.app_path) + parse_app_path = staticmethod(AnswerRecordsClient.parse_app_path) context_path = staticmethod(AnswerRecordsClient.context_path) parse_context_path = staticmethod(AnswerRecordsClient.parse_context_path) document_path = staticmethod(AnswerRecordsClient.document_path) @@ -93,6 +95,10 @@ class AnswerRecordsAsyncClient: parse_intent_path = staticmethod(AnswerRecordsClient.parse_intent_path) tool_path = staticmethod(AnswerRecordsClient.tool_path) parse_tool_path = staticmethod(AnswerRecordsClient.parse_tool_path) + tool_path = staticmethod(AnswerRecordsClient.tool_path) + parse_tool_path = staticmethod(AnswerRecordsClient.parse_tool_path) + toolset_path = staticmethod(AnswerRecordsClient.toolset_path) + parse_toolset_path = staticmethod(AnswerRecordsClient.parse_toolset_path) common_billing_account_path = staticmethod( AnswerRecordsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py index ff8fbd219e65..c01800ab6401 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py @@ -252,6 +252,28 @@ def parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def context_path( project: str, @@ -313,6 +335,30 @@ def parse_intent_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/agent/intents/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -335,6 +381,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def 
parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py index 440358830bd0..f4970d5604d9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/async_client.py @@ -49,7 +49,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2beta1.services.conversations import pagers -from google.cloud.dialogflow_v2beta1.types import conversation, generator, participant +from google.cloud.dialogflow_v2beta1.types import ( + conversation, + conversation_profile, + generator, + participant, +) from google.cloud.dialogflow_v2beta1.types import conversation as gcd_conversation from .client import ConversationsClient @@ -86,6 +91,8 @@ class ConversationsAsyncClient: parse_answer_record_path = staticmethod( ConversationsClient.parse_answer_record_path ) + app_path = staticmethod(ConversationsClient.app_path) + parse_app_path = staticmethod(ConversationsClient.parse_app_path) conversation_path = staticmethod(ConversationsClient.conversation_path) parse_conversation_path = staticmethod(ConversationsClient.parse_conversation_path) conversation_model_path = staticmethod(ConversationsClient.conversation_model_path) @@ -120,6 +127,10 @@ class ConversationsAsyncClient: parse_phrase_set_path = staticmethod(ConversationsClient.parse_phrase_set_path) tool_path = staticmethod(ConversationsClient.tool_path) parse_tool_path = 
staticmethod(ConversationsClient.parse_tool_path) + tool_path = staticmethod(ConversationsClient.tool_path) + parse_tool_path = staticmethod(ConversationsClient.parse_tool_path) + toolset_path = staticmethod(ConversationsClient.toolset_path) + parse_toolset_path = staticmethod(ConversationsClient.parse_toolset_path) common_billing_account_path = staticmethod( ConversationsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py index 296f364ac5c2..f6c516eafed2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py @@ -66,7 +66,12 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2beta1.services.conversations import pagers -from google.cloud.dialogflow_v2beta1.types import conversation, generator, participant +from google.cloud.dialogflow_v2beta1.types import ( + conversation, + conversation_profile, + generator, + participant, +) from google.cloud.dialogflow_v2beta1.types import conversation as gcd_conversation from .transports.base import DEFAULT_CLIENT_INFO, ConversationsTransport @@ -267,6 +272,28 @@ def parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() 
if m else {} + @staticmethod def conversation_path( project: str, @@ -483,6 +510,30 @@ def parse_phrase_set_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -505,6 +556,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/async_client.py index f1a0bdcf0d7c..b08d2a5f4127 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/async_client.py +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/async_client.py @@ -86,6 +86,8 @@ class GeneratorEvaluationsAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = GeneratorEvaluationsClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = GeneratorEvaluationsClient._DEFAULT_UNIVERSE + app_path = staticmethod(GeneratorEvaluationsClient.app_path) + parse_app_path = staticmethod(GeneratorEvaluationsClient.parse_app_path) generator_path = staticmethod(GeneratorEvaluationsClient.generator_path) parse_generator_path = staticmethod(GeneratorEvaluationsClient.parse_generator_path) generator_evaluation_path = staticmethod( @@ -96,6 +98,10 @@ class GeneratorEvaluationsAsyncClient: ) tool_path = staticmethod(GeneratorEvaluationsClient.tool_path) parse_tool_path = staticmethod(GeneratorEvaluationsClient.parse_tool_path) + tool_path = staticmethod(GeneratorEvaluationsClient.tool_path) + parse_tool_path = staticmethod(GeneratorEvaluationsClient.parse_tool_path) + toolset_path = staticmethod(GeneratorEvaluationsClient.toolset_path) + parse_toolset_path = staticmethod(GeneratorEvaluationsClient.parse_toolset_path) common_billing_account_path = staticmethod( GeneratorEvaluationsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/client.py index d6abe20edb25..79c94105abf7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generator_evaluations/client.py @@ -239,6 +239,28 @@ def transport(self) -> GeneratorEvaluationsTransport: """ return self._transport + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return 
"projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def generator_path( project: str, @@ -285,6 +307,30 @@ def parse_generator_evaluation_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def tool_path( project: str, @@ -307,6 +353,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git 
a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py index 5e1ac807480a..b7d1053e4945 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/async_client.py @@ -50,7 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2beta1.services.generators import pagers -from google.cloud.dialogflow_v2beta1.types import generator +from google.cloud.dialogflow_v2beta1.types import ces_app, ces_tool, generator, toolset from google.cloud.dialogflow_v2beta1.types import generator as gcd_generator from .client import GeneratorsClient @@ -84,10 +84,16 @@ class GeneratorsAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = GeneratorsClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = GeneratorsClient._DEFAULT_UNIVERSE + app_path = staticmethod(GeneratorsClient.app_path) + parse_app_path = staticmethod(GeneratorsClient.parse_app_path) generator_path = staticmethod(GeneratorsClient.generator_path) parse_generator_path = staticmethod(GeneratorsClient.parse_generator_path) tool_path = staticmethod(GeneratorsClient.tool_path) parse_tool_path = staticmethod(GeneratorsClient.parse_tool_path) + tool_path = staticmethod(GeneratorsClient.tool_path) + parse_tool_path = staticmethod(GeneratorsClient.parse_tool_path) + toolset_path = staticmethod(GeneratorsClient.toolset_path) + parse_toolset_path = staticmethod(GeneratorsClient.parse_toolset_path) common_billing_account_path = staticmethod( GeneratorsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py index 
127c45a5f3c7..79c19c9599e7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py @@ -67,7 +67,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.cloud.dialogflow_v2beta1.services.generators import pagers -from google.cloud.dialogflow_v2beta1.types import generator +from google.cloud.dialogflow_v2beta1.types import ces_app, ces_tool, generator, toolset from google.cloud.dialogflow_v2beta1.types import generator as gcd_generator from .transports.base import DEFAULT_CLIENT_INFO, GeneratorsTransport @@ -237,6 +237,28 @@ def transport(self) -> GeneratorsTransport: """ return self._transport + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def generator_path( project: str, @@ -259,6 +281,30 @@ def parse_generator_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else 
{} + @staticmethod def tool_path( project: str, @@ -281,6 +327,30 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py index ea752c961ac2..c4291d75ad12 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/async_client.py @@ -86,6 +86,8 @@ class ParticipantsAsyncClient: answer_record_path = staticmethod(ParticipantsClient.answer_record_path) parse_answer_record_path = staticmethod(ParticipantsClient.parse_answer_record_path) + app_path = staticmethod(ParticipantsClient.app_path) + parse_app_path = staticmethod(ParticipantsClient.parse_app_path) context_path = staticmethod(ParticipantsClient.context_path) parse_context_path = staticmethod(ParticipantsClient.parse_context_path) document_path = staticmethod(ParticipantsClient.document_path) @@ -104,6 +106,10 @@ class ParticipantsAsyncClient: ) tool_path = staticmethod(ParticipantsClient.tool_path) parse_tool_path = staticmethod(ParticipantsClient.parse_tool_path) + 
tool_path = staticmethod(ParticipantsClient.tool_path) + parse_tool_path = staticmethod(ParticipantsClient.parse_tool_path) + toolset_path = staticmethod(ParticipantsClient.toolset_path) + parse_toolset_path = staticmethod(ParticipantsClient.parse_toolset_path) common_billing_account_path = staticmethod( ParticipantsClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py index 9f72fefeec53..64d781131a13 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py @@ -254,6 +254,28 @@ def parse_answer_record_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def app_path( + project: str, + location: str, + app: str, + ) -> str: + """Returns a fully-qualified app string.""" + return "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + + @staticmethod + def parse_app_path(path: str) -> Dict[str, str]: + """Parses a app path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def context_path( project: str, @@ -427,6 +449,54 @@ def parse_tool_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def tool_path( + project: str, + location: str, + app: str, + tool: str, + ) -> str: + """Returns a fully-qualified tool string.""" + return "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + + @staticmethod + def parse_tool_path(path: str) -> Dict[str, str]: + """Parses a tool path into its component 
segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/tools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def toolset_path( + project: str, + location: str, + app: str, + toolset: str, + ) -> str: + """Returns a fully-qualified toolset string.""" + return "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + + @staticmethod + def parse_toolset_path(path: str) -> Dict[str, str]: + """Parses a toolset path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/apps/(?P.+?)/toolsets/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py index 1c48b6fb5b57..4b0a387666ce 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py @@ -45,6 +45,7 @@ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -82,6 +83,8 @@ class PhoneNumbersAsyncClient: phone_number_path = staticmethod(PhoneNumbersClient.phone_number_path) parse_phone_number_path = staticmethod(PhoneNumbersClient.parse_phone_number_path) + sip_trunk_path = staticmethod(PhoneNumbersClient.sip_trunk_path) + parse_sip_trunk_path = staticmethod(PhoneNumbersClient.parse_sip_trunk_path) 
common_billing_account_path = staticmethod( PhoneNumbersClient.common_billing_account_path ) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py index fa5f32da9a9f..ef83561a07f0 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py @@ -62,6 +62,7 @@ _LOGGER = std_logging.getLogger(__name__) import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -252,6 +253,28 @@ def parse_phone_number_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def sip_trunk_path( + project: str, + location: str, + siptrunk: str, + ) -> str: + """Returns a fully-qualified sip_trunk string.""" + return "projects/{project}/locations/{location}/sipTrunks/{siptrunk}".format( + project=project, + location=location, + siptrunk=siptrunk, + ) + + @staticmethod + def parse_sip_trunk_path(path: str) -> Dict[str, str]: + """Parses a sip_trunk path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/sipTrunks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py index 9c9969e82f7f..819e5212d21c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py @@ -58,6 +58,12 @@ TelephonyDtmfEvents, VoiceSelectionParams, ) +from .ces_app import ( + CesAppSpec, +) +from .ces_tool import ( + CesToolSpec, +) from .context import ( Context, CreateContextRequest, @@ -89,6 +95,7 @@ ListMessagesRequest, ListMessagesResponse, SearchKnowledgeAnswer, + SearchKnowledgeDebugInfo, SearchKnowledgeRequest, SearchKnowledgeResponse, SuggestConversationSummaryRequest, @@ -253,15 +260,18 @@ CompileSuggestionRequest, CompileSuggestionResponse, CreateParticipantRequest, + DatastoreResponseReason, DialogflowAssistAnswer, DtmfParameters, FaqAnswer, GenerateSuggestionsResponse, GetParticipantRequest, + IngestedContextReferenceDebugInfo, InputTextConfig, IntentInput, IntentSuggestion, KnowledgeAssistAnswer, + KnowledgeAssistDebugInfo, ListParticipantsRequest, ListParticipantsResponse, ListSuggestionsRequest, @@ -271,6 +281,7 @@ OutputAudio, Participant, ResponseMessage, + ServiceLatency, SmartReplyAnswer, StreamingAnalyzeContentRequest, StreamingAnalyzeContentResponse, @@ -346,6 +357,9 @@ ToolCall, ToolCallResult, ) +from .toolset import ( + ToolsetTool, +) from .validation_result import ( ValidationError, ValidationResult, @@ -403,6 +417,8 @@ "SpeechModelVariant", "SsmlVoiceGender", "TelephonyDtmf", + "CesAppSpec", + "CesToolSpec", "Context", "CreateContextRequest", "DeleteAllContextsRequest", @@ -431,6 +447,7 @@ "ListMessagesRequest", "ListMessagesResponse", "SearchKnowledgeAnswer", + "SearchKnowledgeDebugInfo", "SearchKnowledgeRequest", "SearchKnowledgeResponse", "SuggestConversationSummaryRequest", @@ -570,10 +587,12 @@ "FaqAnswer", "GenerateSuggestionsResponse", "GetParticipantRequest", + "IngestedContextReferenceDebugInfo", "InputTextConfig", "IntentInput", "IntentSuggestion", "KnowledgeAssistAnswer", + "KnowledgeAssistDebugInfo", "ListParticipantsRequest", "ListParticipantsResponse", "ListSuggestionsRequest", @@ -583,6 +602,7 @@ "OutputAudio", 
"Participant", "ResponseMessage", + "ServiceLatency", "SmartReplyAnswer", "StreamingAnalyzeContentRequest", "StreamingAnalyzeContentResponse", @@ -600,6 +620,7 @@ "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "UpdateParticipantRequest", + "DatastoreResponseReason", "DeletePhoneNumberRequest", "ListPhoneNumbersRequest", "ListPhoneNumbersResponse", @@ -645,6 +666,7 @@ "UpdateToolRequest", "ToolCall", "ToolCallResult", + "ToolsetTool", "ValidationError", "ValidationResult", "CreateVersionRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_app.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_app.py new file mode 100644 index 000000000000..7c3688df4c5b --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_app.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2beta1.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2beta1", + manifest={ + "CesAppSpec", + }, +) + + +class CesAppSpec(proto.Message): + r"""Spec of CES app that the generator can choose from. + + Attributes: + ces_app (str): + Optional. Format: + ``projects//locations//apps/``. 
+ confirmation_requirement (google.cloud.dialogflow_v2beta1.types.Tool.ConfirmationRequirement): + Optional. Indicates whether the app requires + human confirmation. + """ + + ces_app: str = proto.Field( + proto.STRING, + number=1, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=2, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_tool.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_tool.py new file mode 100644 index 000000000000..e44c12132469 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/ces_tool.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2beta1.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2beta1", + manifest={ + "CesToolSpec", + }, +) + + +class CesToolSpec(proto.Message): + r"""Spec of CES tool that the generator can choose from. + + Attributes: + ces_tool (str): + Optional. Format: + ``projects//locations//apps//tools/``. + confirmation_requirement (google.cloud.dialogflow_v2beta1.types.Tool.ConfirmationRequirement): + Optional. 
Indicates whether the tool requires + human confirmation. + """ + + ces_tool: str = proto.Field( + proto.STRING, + number=1, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=2, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py index 732f0e10aa84..774a3fb17103 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py @@ -51,6 +51,7 @@ "GenerateStatelessSuggestionRequest", "GenerateStatelessSuggestionResponse", "SearchKnowledgeRequest", + "SearchKnowledgeDebugInfo", "SearchKnowledgeResponse", "SearchKnowledgeAnswer", "GenerateSuggestionsRequest", @@ -108,9 +109,18 @@ class Conversation(proto.Message): telephony_connection_info (google.cloud.dialogflow_v2beta1.types.Conversation.TelephonyConnectionInfo): Output only. The telephony connection information. + initial_conversation_profile (google.cloud.dialogflow_v2beta1.types.ConversationProfile): + Optional. Output only. The initial + conversation profile to be used to configure + this conversation, which is a copy of the + conversation profile config read at conversation + creation time. ingested_context_references (MutableMapping[str, google.cloud.dialogflow_v2beta1.types.Conversation.ContextReference]): Output only. The context reference updates provided by external systems. + initial_generator_contexts (MutableMapping[str, google.cloud.dialogflow_v2beta1.types.Conversation.GeneratorContext]): + Output only. A map with generator name as key + and generator context as value. 
""" class LifecycleState(proto.Enum): @@ -347,6 +357,48 @@ class ContentFormat(proto.Enum): message=timestamp_pb2.Timestamp, ) + class GeneratorContext(proto.Message): + r"""Represents the context of a generator. + + Attributes: + generator_type (google.cloud.dialogflow_v2beta1.types.Conversation.GeneratorContext.GeneratorType): + Output only. The type of the generator. + """ + + class GeneratorType(proto.Enum): + r"""The available generator types. + + Values: + GENERATOR_TYPE_UNSPECIFIED (0): + Unspecified generator type. + FREE_FORM (1): + Free form generator type. + AGENT_COACHING (2): + Agent coaching generator type. + SUMMARIZATION (3): + Summarization generator type. + TRANSLATION (4): + Translation generator type. + AGENT_FEEDBACK (5): + Agent feedback generator type. + CUSTOMER_MESSAGE_GENERATION (6): + Customer message generation generator type. + """ + + GENERATOR_TYPE_UNSPECIFIED = 0 + FREE_FORM = 1 + AGENT_COACHING = 2 + SUMMARIZATION = 3 + TRANSLATION = 4 + AGENT_FEEDBACK = 5 + CUSTOMER_MESSAGE_GENERATION = 6 + + generator_type: "Conversation.GeneratorContext.GeneratorType" = proto.Field( + proto.ENUM, + number=1, + enum="Conversation.GeneratorContext.GeneratorType", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -385,12 +437,25 @@ class ContentFormat(proto.Enum): number=10, message=TelephonyConnectionInfo, ) + initial_conversation_profile: gcd_conversation_profile.ConversationProfile = ( + proto.Field( + proto.MESSAGE, + number=15, + message=gcd_conversation_profile.ConversationProfile, + ) + ) ingested_context_references: MutableMapping[str, ContextReference] = proto.MapField( proto.STRING, proto.MESSAGE, number=17, message=ContextReference, ) + initial_generator_contexts: MutableMapping[str, GeneratorContext] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=18, + message=GeneratorContext, + ) class ConversationPhoneNumber(proto.Message): @@ -838,6 +903,9 @@ class Summary(proto.Message): sections. 
The key is the section's name and the value is the section's content. There is no specific format for the key or value. + sorted_text_sections (MutableSequence[google.cloud.dialogflow_v2beta1.types.SuggestConversationSummaryResponse.Summary.SummarySection]): + Same as text_sections, but in an order that is consistent + with the order of the sections in the generator. answer_record (str): The name of the answer record. Format: @@ -849,6 +917,25 @@ class Summary(proto.Message): was not used to generate this summary. """ + class SummarySection(proto.Message): + r"""A component of the generated summary. + + Attributes: + section (str): + Output only. Name of the section. + summary (str): + Output only. Summary text for the section. + """ + + section: str = proto.Field( + proto.STRING, + number=1, + ) + summary: str = proto.Field( + proto.STRING, + number=2, + ) + text: str = proto.Field( proto.STRING, number=1, @@ -858,6 +945,13 @@ class Summary(proto.Message): proto.STRING, number=4, ) + sorted_text_sections: MutableSequence[ + "SuggestConversationSummaryResponse.Summary.SummarySection" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="SuggestConversationSummaryResponse.Summary.SummarySection", + ) answer_record: str = proto.Field( proto.STRING, number=3, @@ -1549,6 +1643,75 @@ class FilterSpecs(proto.Message): ) +class SearchKnowledgeDebugInfo(proto.Message): + r"""Debug information related to SearchKnowledge feature. + + Attributes: + datastore_response_reason (google.cloud.dialogflow_v2beta1.types.DatastoreResponseReason): + Response reason from datastore which + indicates data serving status or answer quality + degradation. + search_knowledge_behavior (google.cloud.dialogflow_v2beta1.types.SearchKnowledgeDebugInfo.SearchKnowledgeBehavior): + Configured behaviors for SearchKnowledge. 
+ ingested_context_reference_debug_info (google.cloud.dialogflow_v2beta1.types.IngestedContextReferenceDebugInfo): + Information about parameters ingested for + search knowledge. + service_latency (google.cloud.dialogflow_v2beta1.types.ServiceLatency): + The latency of the service. + """ + + class SearchKnowledgeBehavior(proto.Message): + r"""Configured behaviors for SearchKnowledge. + + Attributes: + answer_generation_rewriter_on (bool): + Whether data store agent rewriter was turned + on for the request. + end_user_metadata_included (bool): + Whether end_user_metadata is included in the data store + agent call. + third_party_connector_allowed (bool): + This field indicates whether third party + connectors are enabled for the project. Note + that this field only indicates if the project is + allowlisted for connectors. + """ + + answer_generation_rewriter_on: bool = proto.Field( + proto.BOOL, + number=1, + ) + end_user_metadata_included: bool = proto.Field( + proto.BOOL, + number=2, + ) + third_party_connector_allowed: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_response_reason: participant.DatastoreResponseReason = proto.Field( + proto.ENUM, + number=1, + enum=participant.DatastoreResponseReason, + ) + search_knowledge_behavior: SearchKnowledgeBehavior = proto.Field( + proto.MESSAGE, + number=2, + message=SearchKnowledgeBehavior, + ) + ingested_context_reference_debug_info: participant.IngestedContextReferenceDebugInfo = proto.Field( + proto.MESSAGE, + number=3, + message=participant.IngestedContextReferenceDebugInfo, + ) + service_latency: participant.ServiceLatency = proto.Field( + proto.MESSAGE, + number=4, + message=participant.ServiceLatency, + ) + + class SearchKnowledgeResponse(proto.Message): r"""The response message for [Conversations.SearchKnowledge][google.cloud.dialogflow.v2beta1.Conversations.SearchKnowledge]. @@ -1560,6 +1723,8 @@ class SearchKnowledgeResponse(proto.Message): confidence. 
rewritten_query (str): The rewritten query used to search knowledge. + search_knowledge_debug_info (google.cloud.dialogflow_v2beta1.types.SearchKnowledgeDebugInfo): + Debug info for SearchKnowledge. """ answers: MutableSequence["SearchKnowledgeAnswer"] = proto.RepeatedField( @@ -1571,6 +1736,11 @@ class SearchKnowledgeResponse(proto.Message): proto.STRING, number=3, ) + search_knowledge_debug_info: "SearchKnowledgeDebugInfo" = proto.Field( + proto.MESSAGE, + number=4, + message="SearchKnowledgeDebugInfo", + ) class SearchKnowledgeAnswer(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py index 45e5ef265ef5..34b35d22ef56 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py @@ -64,6 +64,10 @@ class ConversationProfile(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Update time of the conversation profile. + use_bidi_streaming (bool): + Optional. Whether to use the bidi streaming + API in telephony integration for the + conversation profile. automated_agent_config (google.cloud.dialogflow_v2beta1.types.AutomatedAgentConfig): Configuration for an automated agent to use with this profile. 
@@ -142,6 +146,10 @@ class ConversationProfile(proto.Message): number=12, message=timestamp_pb2.Timestamp, ) + use_bidi_streaming: bool = proto.Field( + proto.BOOL, + number=23, + ) automated_agent_config: "AutomatedAgentConfig" = proto.Field( proto.MESSAGE, number=3, @@ -845,6 +853,30 @@ class MessageAnalysisConfig(proto.Message): [ListMessagesResponse.messages.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.ListMessagesResponse.messages] If Pub/Sub notification is configured, result will be in [ConversationEvent.new_message_payload.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.ConversationEvent.new_message_payload]. + enable_sentiment_analysis_v3 (bool): + Optional. Enables sentiment analysis for audio input and + conversation messages. If unspecified, defaults to false. If + this flag is set to true, other 'enable_sentiment_analysis' + fields will be ignored. + + Sentiment analysis inspects user input and identifies the + prevailing subjective opinion, especially to determine a + user's attitude as positive, negative, or neutral. + https://cloud.google.com/natural-language/docs/basics#sentiment_analysis + For + [Participants.StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent] + method, result will be in + [StreamingAnalyzeContentResponse.message.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentResponse.message]. 
+ For + [Participants.AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] + method, result will be in + [AnalyzeContentResponse.message.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.AnalyzeContentResponse.message] + For + [Conversations.ListMessages][google.cloud.dialogflow.v2beta1.Conversations.ListMessages] + method, result will be in + [ListMessagesResponse.messages.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.ListMessagesResponse.messages] + If Pub/Sub notification is configured, result will be in + [ConversationEvent.new_message_payload.SentimentAnalysisResult][google.cloud.dialogflow.v2beta1.ConversationEvent.new_message_payload]. """ enable_entity_extraction: bool = proto.Field( @@ -855,6 +887,10 @@ class MessageAnalysisConfig(proto.Message): proto.BOOL, number=3, ) + enable_sentiment_analysis_v3: bool = proto.Field( + proto.BOOL, + number=5, + ) notification_config: "NotificationConfig" = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/generator.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/generator.py index 19f64aa8dc7e..6f8748305136 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/generator.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/generator.py @@ -21,7 +21,12 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.dialogflow_v2beta1.types import agent_coaching_instruction +from google.cloud.dialogflow_v2beta1.types import ( + agent_coaching_instruction, + ces_app, + ces_tool, + toolset, +) from google.cloud.dialogflow_v2beta1.types import tool_call as gcd_tool_call __protobuf__ = proto.module( @@ -675,6 +680,15 @@ class Generator(proto.Message): Optional. Configuration for suggestion deduping. This is only applicable to AI Coach feature. 
+ toolset_tools (MutableSequence[google.cloud.dialogflow_v2beta1.types.ToolsetTool]): + Optional. List of CES toolset specs that the + generator can choose from. + ces_tool_specs (MutableSequence[google.cloud.dialogflow_v2beta1.types.CesToolSpec]): + Optional. List of CES tool specs that the + generator can choose from. + ces_app_specs (MutableSequence[google.cloud.dialogflow_v2beta1.types.CesAppSpec]): + Optional. List of CES app specs that the + generator can choose from. """ name: str = proto.Field( @@ -737,6 +751,21 @@ class Generator(proto.Message): number=23, message="SuggestionDedupingConfig", ) + toolset_tools: MutableSequence[toolset.ToolsetTool] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message=toolset.ToolsetTool, + ) + ces_tool_specs: MutableSequence[ces_tool.CesToolSpec] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message=ces_tool.CesToolSpec, + ) + ces_app_specs: MutableSequence[ces_app.CesAppSpec] = proto.RepeatedField( + proto.MESSAGE, + number=29, + message=ces_app.CesAppSpec, + ) class FreeFormSuggestion(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py index 51baa4b7cb39..a4bf988ccc12 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py @@ -29,6 +29,7 @@ __protobuf__ = proto.module( package="google.cloud.dialogflow.v2beta1", manifest={ + "DatastoreResponseReason", "Participant", "Message", "CreateParticipantRequest", @@ -73,6 +74,9 @@ "ResponseMessage", "SuggestKnowledgeAssistRequest", "SuggestKnowledgeAssistResponse", + "IngestedContextReferenceDebugInfo", + "ServiceLatency", + "KnowledgeAssistDebugInfo", "KnowledgeAssistAnswer", "BidiStreamingAnalyzeContentRequest", "BidiStreamingAnalyzeContentResponse", @@ -80,6 +84,49 @@ ) +class 
DatastoreResponseReason(proto.Enum): + r"""Response reason from datastore which indicates data serving + status or answer quality degradation. + + Values: + DATASTORE_RESPONSE_REASON_UNSPECIFIED (0): + Default value. + NONE (1): + No specific response reason from datastore. + SEARCH_OUT_OF_QUOTA (2): + Search is blocked due to out of quota. + SEARCH_EMPTY_RESULTS (3): + Search returns empty results. + ANSWER_GENERATION_GEN_AI_DISABLED (4): + Generative AI is disabled. + ANSWER_GENERATION_OUT_OF_QUOTA (5): + Answer generation is blocked due to out of + quota. + ANSWER_GENERATION_ERROR (6): + Answer generation encounters an error. + ANSWER_GENERATION_NOT_ENOUGH_INFO (7): + Answer generation does not have enough + information to generate answer. + ANSWER_GENERATION_RAI_FAILED (8): + Answer generation is blocked by RAI + (Responsible AI) failure. + ANSWER_GENERATION_NOT_GROUNDED (9): + Answer generation is not grounded on reliable + sources. + """ + + DATASTORE_RESPONSE_REASON_UNSPECIFIED = 0 + NONE = 1 + SEARCH_OUT_OF_QUOTA = 2 + SEARCH_EMPTY_RESULTS = 3 + ANSWER_GENERATION_GEN_AI_DISABLED = 4 + ANSWER_GENERATION_OUT_OF_QUOTA = 5 + ANSWER_GENERATION_ERROR = 6 + ANSWER_GENERATION_NOT_ENOUGH_INFO = 7 + ANSWER_GENERATION_RAI_FAILED = 8 + ANSWER_GENERATION_NOT_GROUNDED = 9 + + class Participant(proto.Message): r"""Represents a conversation participant (human agent, virtual agent, end-user). @@ -2867,6 +2914,406 @@ class SuggestKnowledgeAssistResponse(proto.Message): ) +class IngestedContextReferenceDebugInfo(proto.Message): + r"""Debug information related to ingested context reference. + + Attributes: + project_not_allowlisted (bool): + Indicates if the project is allowlisted to + use ingested context reference. + context_reference_retrieved (bool): + The status of context_reference retrieval from database. 
+ ingested_parameters_debug_info (MutableSequence[google.cloud.dialogflow_v2beta1.types.IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo]): + Parameters ingested from the context + reference. + """ + + class IngestedParameterDebugInfo(proto.Message): + r"""Debug information related to ingested parameters from context + reference. + + Attributes: + parameter (str): + The name of the parameter in the context + reference. + ingestion_status (google.cloud.dialogflow_v2beta1.types.IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus): + The ingestion status for this specific + parameter. + """ + + class IngestionStatus(proto.Enum): + r"""Enum representing the various states of parameter ingestion. + + Values: + INGESTION_STATUS_UNSPECIFIED (0): + Default value, indicates that the ingestion + status is not specified. + INGESTION_STATUS_SUCCEEDED (1): + Indicates that the parameter was successfully + ingested. + INGESTION_STATUS_CONTEXT_NOT_AVAILABLE (2): + Indicates that the parameter was not + available for ingestion. + INGESTION_STATUS_PARSE_FAILED (3): + Indicates that there was a failure parsing + the parameter content. + INGESTION_STATUS_INVALID_ENTRY (4): + Indicates that the context reference had an + unexpected number of content entries as Context + reference should only have one entry. + INGESTION_STATUS_INVALID_FORMAT (5): + Indicates that the context reference content + was not in the expected format (e.g., JSON). + INGESTION_STATUS_LANGUAGE_MISMATCH (6): + Indicates that the context reference language + does not match the conversation language. 
+ """ + + INGESTION_STATUS_UNSPECIFIED = 0 + INGESTION_STATUS_SUCCEEDED = 1 + INGESTION_STATUS_CONTEXT_NOT_AVAILABLE = 2 + INGESTION_STATUS_PARSE_FAILED = 3 + INGESTION_STATUS_INVALID_ENTRY = 4 + INGESTION_STATUS_INVALID_FORMAT = 5 + INGESTION_STATUS_LANGUAGE_MISMATCH = 6 + + parameter: str = proto.Field( + proto.STRING, + number=1, + ) + ingestion_status: "IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus" = proto.Field( + proto.ENUM, + number=2, + enum="IngestedContextReferenceDebugInfo.IngestedParameterDebugInfo.IngestionStatus", + ) + + project_not_allowlisted: bool = proto.Field( + proto.BOOL, + number=1, + ) + context_reference_retrieved: bool = proto.Field( + proto.BOOL, + number=2, + ) + ingested_parameters_debug_info: MutableSequence[IngestedParameterDebugInfo] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message=IngestedParameterDebugInfo, + ) + ) + + +class ServiceLatency(proto.Message): + r"""Message to represent the latency of the service. + + Attributes: + internal_service_latencies (MutableSequence[google.cloud.dialogflow_v2beta1.types.ServiceLatency.InternalServiceLatency]): + A list of internal service latencies. + """ + + class InternalServiceLatency(proto.Message): + r"""Message to represent the latency of an internal service. + + Attributes: + step (str): + The name of the internal service. + latency_ms (float): + The latency of the internal service in + milliseconds. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of the internal service. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + The completion time of the internal service. 
+ """ + + step: str = proto.Field( + proto.STRING, + number=1, + ) + latency_ms: float = proto.Field( + proto.FLOAT, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + internal_service_latencies: MutableSequence[InternalServiceLatency] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InternalServiceLatency, + ) + ) + + +class KnowledgeAssistDebugInfo(proto.Message): + r"""Debug information related to Knowledge Assist feature. + + Attributes: + query_generation_failure_reason (google.cloud.dialogflow_v2beta1.types.KnowledgeAssistDebugInfo.QueryGenerationFailureReason): + Reason for query generation. + query_categorization_failure_reason (google.cloud.dialogflow_v2beta1.types.KnowledgeAssistDebugInfo.QueryCategorizationFailureReason): + Reason for query categorization. + datastore_response_reason (google.cloud.dialogflow_v2beta1.types.DatastoreResponseReason): + Response reason from datastore which + indicates data serving status or answer quality + degradation. + knowledge_assist_behavior (google.cloud.dialogflow_v2beta1.types.KnowledgeAssistDebugInfo.KnowledgeAssistBehavior): + Configured behaviors for Knowledge Assist. + ingested_context_reference_debug_info (google.cloud.dialogflow_v2beta1.types.IngestedContextReferenceDebugInfo): + Information about parameters ingested for + search knowledge. + service_latency (google.cloud.dialogflow_v2beta1.types.ServiceLatency): + The latency of the service. + """ + + class QueryGenerationFailureReason(proto.Enum): + r"""Reason for query generation failure. + + Values: + QUERY_GENERATION_FAILURE_REASON_UNSPECIFIED (0): + Default value. + QUERY_GENERATION_OUT_OF_QUOTA (1): + Query generation is blocked due to out of + quota.
+ QUERY_GENERATION_FAILED (2): + Call to Knowledge Assist query generation + model fails. + QUERY_GENERATION_NO_QUERY_GENERATED (3): + Query generation model decides that there is + no new topic change or there have been similar + queries generated in the previous turns. + QUERY_GENERATION_RAI_FAILED (4): + Knowledge Assist generated query is blocked by + RAI (Responsible AI). + NOT_IN_ALLOWLIST (5): + Query generation is blocked by Knowledge + Assist conversation profile level / agent id + level filtering. + QUERY_GENERATION_QUERY_REDACTED (6): + The generated query is blocked due to + redaction. + QUERY_GENERATION_LLM_RESPONSE_PARSE_FAILED (10): + Query generation failed due to LLM response + parse failure. + QUERY_GENERATION_EMPTY_CONVERSATION (11): + The conversation has no messages. + QUERY_GENERATION_EMPTY_LAST_MESSAGE (12): + The last message in the conversation is + empty. + QUERY_GENERATION_TRIGGERING_EVENT_CONDITION_NOT_MET (13): + The trigger event condition is not met. This occurs in the + following scenarios: + + 1. The trigger_event is CUSTOMER_MESSAGE or UNSPECIFIED, but + the last message is not from the customer. + 2. The trigger_event is AGENT_MESSAGE, but the last message + is not from the agent. + """ + + QUERY_GENERATION_FAILURE_REASON_UNSPECIFIED = 0 + QUERY_GENERATION_OUT_OF_QUOTA = 1 + QUERY_GENERATION_FAILED = 2 + QUERY_GENERATION_NO_QUERY_GENERATED = 3 + QUERY_GENERATION_RAI_FAILED = 4 + NOT_IN_ALLOWLIST = 5 + QUERY_GENERATION_QUERY_REDACTED = 6 + QUERY_GENERATION_LLM_RESPONSE_PARSE_FAILED = 10 + QUERY_GENERATION_EMPTY_CONVERSATION = 11 + QUERY_GENERATION_EMPTY_LAST_MESSAGE = 12 + QUERY_GENERATION_TRIGGERING_EVENT_CONDITION_NOT_MET = 13 + + class QueryCategorizationFailureReason(proto.Enum): + r"""Reason for query categorization failure. + + Values: + QUERY_CATEGORIZATION_FAILURE_REASON_UNSPECIFIED (0): + Default value. + QUERY_CATEGORIZATION_INVALID_CONFIG (1): + Vertex AI Search config supplied for query + categorization is invalid.
+ QUERY_CATEGORIZATION_RESULT_NOT_FOUND (2): + Vertex AI Search result does not contain a + query categorization result. + QUERY_CATEGORIZATION_FAILED (3): + Vertex AI Search call fails. + """ + + QUERY_CATEGORIZATION_FAILURE_REASON_UNSPECIFIED = 0 + QUERY_CATEGORIZATION_INVALID_CONFIG = 1 + QUERY_CATEGORIZATION_RESULT_NOT_FOUND = 2 + QUERY_CATEGORIZATION_FAILED = 3 + + class KnowledgeAssistBehavior(proto.Message): + r"""Configured behaviors for Knowledge Assist. + + Attributes: + answer_generation_rewriter_on (bool): + Whether data store agent rewriter was turned + off for the request. + end_user_metadata_included (bool): + Whether end_user_metadata is included in the data store + agent call. + return_query_only (bool): + Whether customers configured to return query + only in the conversation profile. + use_pubsub_delivery (bool): + Whether customers configured to use pubsub to + deliver. + disable_sync_delivery (bool): + Whether customers configured to disable the + synchronous delivery of Knowledge Assist + response. + previous_queries_included (bool): + Whether previously suggested queries are + included in the query generation process. + use_translated_message (bool): + Translated message is included in query + generation process. + use_custom_safety_filter_level (bool): + Safety filter is adjusted by user. + conversation_transcript_has_mixed_languages (bool): + Conversation transcript has mixed languages. + query_generation_agent_language_mismatch (bool): + Whether the agent language from the + translation generator mismatches the end-user + language. + query_generation_end_user_language_mismatch (bool): + Whether the end-user language from the + translation generator mismatches the end-user + language. + third_party_connector_allowed (bool): + This field indicates whether third party + connectors are enabled for the project. + multiple_queries_generated (bool): + Indicates that the query generation model + generated multiple queries.
+ query_contained_search_context (bool): + Indicates that the generated query contains + search context. + invalid_items_query_suggestion_skipped (bool): + Indicates that invalid items were skipped + when parsing the LLM response. + primary_query_redacted_and_replaced (bool): + True if the primary suggested query was + redacted and replaced by an additional query. + appended_search_context_count (int): + The number of search contexts appended to the + query. + """ + + answer_generation_rewriter_on: bool = proto.Field( + proto.BOOL, + number=1, + ) + end_user_metadata_included: bool = proto.Field( + proto.BOOL, + number=2, + ) + return_query_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + use_pubsub_delivery: bool = proto.Field( + proto.BOOL, + number=5, + ) + disable_sync_delivery: bool = proto.Field( + proto.BOOL, + number=6, + ) + previous_queries_included: bool = proto.Field( + proto.BOOL, + number=7, + ) + use_translated_message: bool = proto.Field( + proto.BOOL, + number=8, + ) + use_custom_safety_filter_level: bool = proto.Field( + proto.BOOL, + number=9, + ) + conversation_transcript_has_mixed_languages: bool = proto.Field( + proto.BOOL, + number=10, + ) + query_generation_agent_language_mismatch: bool = proto.Field( + proto.BOOL, + number=11, + ) + query_generation_end_user_language_mismatch: bool = proto.Field( + proto.BOOL, + number=12, + ) + third_party_connector_allowed: bool = proto.Field( + proto.BOOL, + number=13, + ) + multiple_queries_generated: bool = proto.Field( + proto.BOOL, + number=14, + ) + query_contained_search_context: bool = proto.Field( + proto.BOOL, + number=15, + ) + invalid_items_query_suggestion_skipped: bool = proto.Field( + proto.BOOL, + number=16, + ) + primary_query_redacted_and_replaced: bool = proto.Field( + proto.BOOL, + number=17, + ) + appended_search_context_count: int = proto.Field( + proto.INT32, + number=18, + ) + + query_generation_failure_reason: QueryGenerationFailureReason = proto.Field( + proto.ENUM, + 
number=1, + enum=QueryGenerationFailureReason, + ) + query_categorization_failure_reason: QueryCategorizationFailureReason = proto.Field( + proto.ENUM, + number=2, + enum=QueryCategorizationFailureReason, + ) + datastore_response_reason: "DatastoreResponseReason" = proto.Field( + proto.ENUM, + number=3, + enum="DatastoreResponseReason", + ) + knowledge_assist_behavior: KnowledgeAssistBehavior = proto.Field( + proto.MESSAGE, + number=4, + message=KnowledgeAssistBehavior, + ) + ingested_context_reference_debug_info: "IngestedContextReferenceDebugInfo" = ( + proto.Field( + proto.MESSAGE, + number=5, + message="IngestedContextReferenceDebugInfo", + ) + ) + service_latency: "ServiceLatency" = proto.Field( + proto.MESSAGE, + number=6, + message="ServiceLatency", + ) + + class KnowledgeAssistAnswer(proto.Message): r"""Represents a Knowledge Assist answer. @@ -2882,6 +3329,9 @@ class KnowledgeAssistAnswer(proto.Message): answer_record (str): The name of the answer record. Format: ``projects//locations//answer Records/``. + knowledge_assist_debug_info (google.cloud.dialogflow_v2beta1.types.KnowledgeAssistDebugInfo): + Debug information related to Knowledge Assist + feature. 
""" class SuggestedQuery(proto.Message): @@ -3017,6 +3467,11 @@ class Snippet(proto.Message): proto.STRING, number=3, ) + knowledge_assist_debug_info: "KnowledgeAssistDebugInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="KnowledgeAssistDebugInfo", + ) class BidiStreamingAnalyzeContentRequest(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py index debe4c0bfabd..d70fb38685e4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -39,6 +40,9 @@ class PhoneNumber(proto.Message): project through a [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Optional. The unique identifier of this phone number. @@ -66,6 +70,14 @@ class PhoneNumber(proto.Message): ``ACTIVE``. ``PhoneNumber`` objects set to ``DELETE_REQUESTED`` always decline incoming calls and can be removed completely within 30 days. + allowed_sip_trunks (google.cloud.dialogflow_v2beta1.types.PhoneNumber.AllowedSipTrunks): + Optional. Only allow calls from the specified + SIP trunks. + + This field is a member of `oneof`_ ``inbound_restriction``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which this resource + will be purged. 
""" class LifecycleState(proto.Enum): @@ -85,6 +97,31 @@ class LifecycleState(proto.Enum): ACTIVE = 1 DELETE_REQUESTED = 2 + class AllowedSipTrunks(proto.Message): + r"""List of SIP trunks that are allowed to make calls to this + phone number. + + Attributes: + sip_trunks (MutableSequence[str]): + List of SIP trunks that are allowed to make + calls to this phone number. If empty, any SIP + trunk is allowed. + carrier_ids (MutableSequence[str]): + Optional. List of GTP carrier IDs allowed to + make calls to this phone number. Used for + private interconnects where standard SIP trunks + aren't applicable. + """ + + sip_trunks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + carrier_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -102,6 +139,17 @@ class LifecycleState(proto.Enum): number=4, enum=LifecycleState, ) + allowed_sip_trunks: AllowedSipTrunks = proto.Field( + proto.MESSAGE, + number=7, + oneof="inbound_restriction", + message=AllowedSipTrunks, + ) + purge_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) class DeletePhoneNumberRequest(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/tool_call.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/tool_call.py index c93222d4f7eb..d6f9beb49f13 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/tool_call.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/tool_call.py @@ -34,6 +34,10 @@ class ToolCall(proto.Message): r"""Represents a call of a specific tool's action with the specified inputs. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -43,6 +47,21 @@ class ToolCall(proto.Message): associated with this call. Format: ``projects//locations//tools/``. + This field is a member of `oneof`_ ``source``. + ces_tool (str): + Optional. CES tool name for this call. Format: + ``projects//locations//apps//tools/``. + + This field is a member of `oneof`_ ``source``. + ces_toolset (str): + Optional. CES toolset name for this call. Format: + ``projects//locations//apps//toolsets/ToolsetID>``. + + This field is a member of `oneof`_ ``source``. + ces_app (str): + Optional. CES app name for this call. Format: + ``projects//locations//apps/``. + This field is a member of `oneof`_ ``source``. tool_display_name (str): Optional. A human readable short name of the @@ -86,6 +105,21 @@ class State(proto.Enum): number=1, oneof="source", ) + ces_tool: str = proto.Field( + proto.STRING, + number=11, + oneof="source", + ) + ces_toolset: str = proto.Field( + proto.STRING, + number=12, + oneof="source", + ) + ces_app: str = proto.Field( + proto.STRING, + number=8, + oneof="source", + ) tool_display_name: str = proto.Field( proto.STRING, number=9, @@ -135,6 +169,21 @@ class ToolCallResult(proto.Message): associated with this call. Format: ``projects//locations//tools/``. + This field is a member of `oneof`_ ``source``. + ces_tool (str): + Optional. CES tool name for this call. Format: + ``projects//locations//apps//tools/``. + + This field is a member of `oneof`_ ``source``. + ces_toolset (str): + Optional. CES toolset name for this call. Format: + ``projects//locations//apps//toolsets/ToolsetID>``. + + This field is a member of `oneof`_ ``source``. + ces_app (str): + Optional. CES app name for this call. Format: + ``projects//locations//apps/``. + This field is a member of `oneof`_ ``source``. action (str): Optional. 
The name of the tool's action @@ -180,6 +229,21 @@ class Error(proto.Message): number=1, oneof="source", ) + ces_tool: str = proto.Field( + proto.STRING, + number=12, + oneof="source", + ) + ces_toolset: str = proto.Field( + proto.STRING, + number=13, + oneof="source", + ) + ces_app: str = proto.Field( + proto.STRING, + number=11, + oneof="source", + ) action: str = proto.Field( proto.STRING, number=2, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/toolset.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/toolset.py new file mode 100644 index 000000000000..cdc6d5c48ed8 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/toolset.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dialogflow_v2beta1.types import tool + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2beta1", + manifest={ + "ToolsetTool", + }, +) + + +class ToolsetTool(proto.Message): + r"""A tool that is created from a toolset. + + Attributes: + toolset (str): + Required. The name of the toolset to retrieve the schema + for. Format: + ``projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}`` + operation_id (str): + Optional. 
The operationId field of the + OpenAPI endpoint. The operationId must be + present in the toolset's definition. + confirmation_requirement (google.cloud.dialogflow_v2beta1.types.Tool.ConfirmationRequirement): + Optional. Indicates whether the tool requires + human confirmation. + """ + + toolset: str = proto.Field( + proto.STRING, + number=1, + ) + operation_id: str = proto.Field( + proto.STRING, + number=2, + ) + confirmation_requirement: tool.Tool.ConfirmationRequirement = proto.Field( + proto.ENUM, + number=3, + enum=tool.Tool.ConfirmationRequirement, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py index eb4d7518a5d8..6bafa0e8b18d 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py @@ -3392,6 +3392,9 @@ def test_update_answer_record_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -3402,6 +3405,9 @@ def test_update_answer_record_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_toolset": "ces_toolset_value", + "ces_tool": "ces_tool_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4409,10 +4415,36 @@ def test_parse_answer_record_path(): assert expected == actual -def test_context_path(): +def test_app_path(): project = "oyster" - session = "nudibranch" - context = "cuttlefish" + location = "nudibranch" + app = "cuttlefish" + expected = 
"projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = AnswerRecordsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "mussel", + "location": "winkle", + "app": "nautilus", + } + path = AnswerRecordsClient.app_path(**expected) + + # Check that the path construction is reversible. + actual = AnswerRecordsClient.parse_app_path(path) + assert expected == actual + + +def test_context_path(): + project = "scallop" + session = "abalone" + context = "squid" expected = "projects/{project}/agent/sessions/{session}/contexts/{context}".format( project=project, session=session, @@ -4424,9 +4456,9 @@ def test_context_path(): def test_parse_context_path(): expected = { - "project": "mussel", - "session": "winkle", - "context": "nautilus", + "project": "clam", + "session": "whelk", + "context": "octopus", } path = AnswerRecordsClient.context_path(**expected) @@ -4436,8 +4468,8 @@ def test_parse_context_path(): def test_intent_path(): - project = "scallop" - intent = "abalone" + project = "oyster" + intent = "nudibranch" expected = "projects/{project}/agent/intents/{intent}".format( project=project, intent=intent, @@ -4448,8 +4480,8 @@ def test_intent_path(): def test_parse_intent_path(): expected = { - "project": "squid", - "intent": "clam", + "project": "cuttlefish", + "intent": "mussel", } path = AnswerRecordsClient.intent_path(**expected) @@ -4459,9 +4491,38 @@ def test_parse_intent_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + app = "scallop" + tool = "abalone" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = AnswerRecordsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def 
test_parse_tool_path(): + expected = { + "project": "squid", + "location": "clam", + "app": "whelk", + "tool": "octopus", + } + path = AnswerRecordsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = AnswerRecordsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "oyster" + location = "nudibranch" + tool = "cuttlefish" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -4473,9 +4534,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "mussel", + "location": "winkle", + "tool": "nautilus", } path = AnswerRecordsClient.tool_path(**expected) @@ -4484,8 +4545,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = AnswerRecordsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = AnswerRecordsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnswerRecordsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4495,7 +4587,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = AnswerRecordsClient.common_billing_account_path(**expected) @@ -4505,7 +4597,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -4515,7 +4607,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = AnswerRecordsClient.common_folder_path(**expected) @@ -4525,7 +4617,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -4535,7 +4627,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = AnswerRecordsClient.common_organization_path(**expected) @@ -4545,7 +4637,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -4555,7 +4647,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = AnswerRecordsClient.common_project_path(**expected) @@ -4565,8 +4657,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "whelk" + location = "octopus" 
expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4577,8 +4669,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = AnswerRecordsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py index 5943fbe7b2a1..0b54dd0e3cda 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py @@ -6467,6 +6467,7 @@ def test_create_conversation_profile_rest_call_success(request_type): "message_analysis_config": { "enable_entity_extraction": True, "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, }, }, "human_agent_handoff_config": { @@ -6810,6 +6811,7 @@ def test_update_conversation_profile_rest_call_success(request_type): "message_analysis_config": { "enable_entity_extraction": True, "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, }, }, "human_agent_handoff_config": { diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py index 861494a5c48b..b1ff4438323a 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py @@ -71,12 +71,16 @@ from google.cloud.dialogflow_v2.types import ( agent_coaching_instruction, audio_config, + ces_app, + ces_tool, conversation, conversation_profile, generator, participant, session, + tool, tool_call, + toolset, ) from 
google.cloud.dialogflow_v2.types import conversation as gcd_conversation @@ -8147,7 +8151,132 @@ def test_create_conversation_rest_call_success(request_type): {"mime_type": "mime_type_value", "content": b"content_blob"} ], }, + "initial_conversation_profile": { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {}, + "update_time": {}, + "automated_agent_config": { + "agent": "agent_value", + "session_ttl": {"seconds": 751, "nanos": 543}, + }, + "human_agent_assistant_config": { + "notification_config": {"topic": "topic_value", "message_format": 1}, + "human_agent_suggestion_config": { + "feature_configs": [ + { + "suggestion_feature": {"type_": 1}, + "enable_event_based_suggestion": True, + "disable_agent_query_logging": True, + "enable_query_suggestion_when_no_answer": True, + "enable_conversation_augmented_query": True, + "enable_query_suggestion_only": True, + "enable_response_debug_info": True, + "rai_settings": { + "rai_category_configs": [ + {"category": 1, "sensitivity_level": 1} + ] + }, + "suggestion_trigger_settings": { + "no_smalltalk": True, + "only_end_user": True, + }, + "query_config": { + "knowledge_base_query_source": { + "knowledge_bases": [ + "knowledge_bases_value1", + "knowledge_bases_value2", + ] + }, + "document_query_source": { + "documents": [ + "documents_value1", + "documents_value2", + ] + }, + "dialogflow_query_source": { + "agent": "agent_value", + "human_agent_side_config": {"agent": "agent_value"}, + }, + "max_results": 1207, + "confidence_threshold": 0.2106, + "context_filter_settings": { + "drop_handoff_messages": True, + "drop_virtual_agent_messages": True, + "drop_ivr_messages": True, + }, + "sections": {"section_types": [1]}, + "context_size": 1311, + }, + "conversation_model_config": { + "model": "model_value", + "baseline_model_version": "baseline_model_version_value", + }, + "conversation_process_config": { + "recent_sentences_count": 2352 + }, + } + ], + "group_suggestion_responses": True, + 
"generators": ["generators_value1", "generators_value2"], + "disable_high_latency_features_sync_delivery": True, + "skip_empty_event_based_suggestion": True, + "use_unredacted_conversation_data": True, + "enable_async_tool_call": True, + }, + "end_user_suggestion_config": {}, + "message_analysis_config": { + "enable_entity_extraction": True, + "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, + }, + }, + "human_agent_handoff_config": { + "live_person_config": {"account_number": "account_number_value"}, + "salesforce_live_agent_config": { + "organization_id": "organization_id_value", + "deployment_id": "deployment_id_value", + "button_id": "button_id_value", + "endpoint_domain": "endpoint_domain_value", + }, + }, + "notification_config": {}, + "logging_config": {"enable_stackdriver_logging": True}, + "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, + "stt_config": { + "speech_model_variant": 1, + "model": "model_value", + "phrase_sets": ["phrase_sets_value1", "phrase_sets_value2"], + "audio_encoding": 1, + "sample_rate_hertz": 1817, + "language_code": "language_code_value", + "enable_word_info": True, + "use_timeout_based_endpointing": True, + }, + "language_code": "language_code_value", + "time_zone": "time_zone_value", + "security_settings": "security_settings_value", + "tts_config": { + "speaking_rate": 0.1373, + "pitch": 0.536, + "volume_gain_db": 0.1467, + "effects_profile_id": [ + "effects_profile_id_value1", + "effects_profile_id_value2", + ], + "voice": {"name": "name_value", "ssml_gender": 1}, + "pronunciations": [ + { + "phrase": "phrase_value", + "phonetic_encoding": 1, + "pronunciation": "pronunciation_value", + } + ], + }, + }, "ingested_context_references": {}, + "initial_generator_contexts": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -10773,9 +10902,35 @@ def test_parse_answer_record_path(): assert expected == actual -def test_conversation_path(): +def test_app_path(): project = "cuttlefish" - conversation = "mussel" + location = "mussel" + app = "winkle" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = ConversationsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "app": "abalone", + } + path = ConversationsClient.app_path(**expected) + + # Check that the path construction is reversible. + actual = ConversationsClient.parse_app_path(path) + assert expected == actual + + +def test_conversation_path(): + project = "squid" + conversation = "clam" expected = "projects/{project}/conversations/{conversation}".format( project=project, conversation=conversation, @@ -10786,8 +10941,8 @@ def test_conversation_path(): def test_parse_conversation_path(): expected = { - "project": "winkle", - "conversation": "nautilus", + "project": "whelk", + "conversation": "octopus", } path = ConversationsClient.conversation_path(**expected) @@ -10797,9 +10952,9 @@ def test_parse_conversation_path(): def test_conversation_model_path(): - project = "scallop" - location = "abalone" - conversation_model = "squid" + project = "oyster" + location = "nudibranch" + conversation_model = "cuttlefish" expected = "projects/{project}/locations/{location}/conversationModels/{conversation_model}".format( project=project, location=location, @@ -10813,9 +10968,9 @@ def test_conversation_model_path(): def test_parse_conversation_model_path(): expected = { - "project": "clam", - "location": "whelk", - "conversation_model": "octopus", + "project": "mussel", + "location": "winkle", + "conversation_model": "nautilus", } path = 
ConversationsClient.conversation_model_path(**expected) @@ -10825,8 +10980,8 @@ def test_parse_conversation_model_path(): def test_conversation_profile_path(): - project = "oyster" - conversation_profile = "nudibranch" + project = "scallop" + conversation_profile = "abalone" expected = "projects/{project}/conversationProfiles/{conversation_profile}".format( project=project, conversation_profile=conversation_profile, @@ -10839,8 +10994,8 @@ def test_conversation_profile_path(): def test_parse_conversation_profile_path(): expected = { - "project": "cuttlefish", - "conversation_profile": "mussel", + "project": "squid", + "conversation_profile": "clam", } path = ConversationsClient.conversation_profile_path(**expected) @@ -10850,9 +11005,9 @@ def test_parse_conversation_profile_path(): def test_cx_security_settings_path(): - project = "winkle" - location = "nautilus" - security_settings = "scallop" + project = "whelk" + location = "octopus" + security_settings = "oyster" expected = "projects/{project}/locations/{location}/securitySettings/{security_settings}".format( project=project, location=location, @@ -10866,9 +11021,9 @@ def test_cx_security_settings_path(): def test_parse_cx_security_settings_path(): expected = { - "project": "abalone", - "location": "squid", - "security_settings": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "security_settings": "mussel", } path = ConversationsClient.cx_security_settings_path(**expected) @@ -10878,10 +11033,10 @@ def test_parse_cx_security_settings_path(): def test_data_store_path(): - project = "whelk" - location = "octopus" - collection = "oyster" - data_store = "nudibranch" + project = "winkle" + location = "nautilus" + collection = "scallop" + data_store = "abalone" expected = "projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}".format( project=project, location=location, @@ -10896,10 +11051,10 @@ def test_data_store_path(): def test_parse_data_store_path(): expected 
= { - "project": "cuttlefish", - "location": "mussel", - "collection": "winkle", - "data_store": "nautilus", + "project": "squid", + "location": "clam", + "collection": "whelk", + "data_store": "octopus", } path = ConversationsClient.data_store_path(**expected) @@ -10909,9 +11064,9 @@ def test_parse_data_store_path(): def test_document_path(): - project = "scallop" - knowledge_base = "abalone" - document = "squid" + project = "oyster" + knowledge_base = "nudibranch" + document = "cuttlefish" expected = "projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}".format( project=project, knowledge_base=knowledge_base, @@ -10923,9 +11078,9 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "clam", - "knowledge_base": "whelk", - "document": "octopus", + "project": "mussel", + "knowledge_base": "winkle", + "document": "nautilus", } path = ConversationsClient.document_path(**expected) @@ -10935,9 +11090,9 @@ def test_parse_document_path(): def test_generator_path(): - project = "oyster" - location = "nudibranch" - generator = "cuttlefish" + project = "scallop" + location = "abalone" + generator = "squid" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -10949,9 +11104,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "mussel", - "location": "winkle", - "generator": "nautilus", + "project": "clam", + "location": "whelk", + "generator": "octopus", } path = ConversationsClient.generator_path(**expected) @@ -10961,8 +11116,8 @@ def test_parse_generator_path(): def test_knowledge_base_path(): - project = "scallop" - knowledge_base = "abalone" + project = "oyster" + knowledge_base = "nudibranch" expected = "projects/{project}/knowledgeBases/{knowledge_base}".format( project=project, knowledge_base=knowledge_base, @@ -10973,8 +11128,8 @@ def test_knowledge_base_path(): def test_parse_knowledge_base_path(): expected 
= { - "project": "squid", - "knowledge_base": "clam", + "project": "cuttlefish", + "knowledge_base": "mussel", } path = ConversationsClient.knowledge_base_path(**expected) @@ -10984,9 +11139,9 @@ def test_parse_knowledge_base_path(): def test_message_path(): - project = "whelk" - conversation = "octopus" - message = "oyster" + project = "winkle" + conversation = "nautilus" + message = "scallop" expected = ( "projects/{project}/conversations/{conversation}/messages/{message}".format( project=project, @@ -11000,9 +11155,9 @@ def test_message_path(): def test_parse_message_path(): expected = { - "project": "nudibranch", - "conversation": "cuttlefish", - "message": "mussel", + "project": "abalone", + "conversation": "squid", + "message": "clam", } path = ConversationsClient.message_path(**expected) @@ -11012,9 +11167,9 @@ def test_parse_message_path(): def test_phrase_set_path(): - project = "winkle" - location = "nautilus" - phrase_set = "scallop" + project = "whelk" + location = "octopus" + phrase_set = "oyster" expected = "projects/{project}/locations/{location}/phraseSets/{phrase_set}".format( project=project, location=location, @@ -11026,9 +11181,9 @@ def test_phrase_set_path(): def test_parse_phrase_set_path(): expected = { - "project": "abalone", - "location": "squid", - "phrase_set": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "phrase_set": "mussel", } path = ConversationsClient.phrase_set_path(**expected) @@ -11038,9 +11193,9 @@ def test_parse_phrase_set_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + tool = "scallop" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -11052,9 +11207,38 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "abalone", + "location": "squid", + "tool": "clam", + 
} + path = ConversationsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = ConversationsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "whelk" + location = "octopus" + app = "oyster" + tool = "nudibranch" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = ConversationsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "app": "winkle", + "tool": "nautilus", } path = ConversationsClient.tool_path(**expected) @@ -11063,8 +11247,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = ConversationsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = ConversationsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConversationsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -11074,7 +11289,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = ConversationsClient.common_billing_account_path(**expected) @@ -11084,7 +11299,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -11094,7 +11309,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = ConversationsClient.common_folder_path(**expected) @@ -11104,7 +11319,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -11114,7 +11329,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = ConversationsClient.common_organization_path(**expected) @@ -11124,7 +11339,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -11134,7 +11349,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = ConversationsClient.common_project_path(**expected) @@ -11144,8 +11359,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "whelk" + location = 
"octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -11156,8 +11371,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = ConversationsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generator_evaluations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generator_evaluations.py index bfd7170a00cc..aa347dd12178 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generator_evaluations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generator_evaluations.py @@ -75,10 +75,14 @@ ) from google.cloud.dialogflow_v2.types import ( agent_coaching_instruction, + ces_app, + ces_tool, generator, generator_evaluation, operations, + tool, tool_call, + toolset, ) from google.cloud.dialogflow_v2.types import ( generator_evaluation as gcd_generator_evaluation, @@ -4320,6 +4324,9 @@ def test_create_generator_evaluation_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -4330,6 +4337,9 @@ def test_create_generator_evaluation_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_toolset": "ces_toolset_value", + "ces_tool": "ces_tool_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4360,6 +4370,19 @@ def test_create_generator_evaluation_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 0.21630000000000002, }, + 
"toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + "confirmation_requirement": 1, + } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [ + {"ces_app": "ces_app_value", "confirmation_requirement": 1} + ], }, "summarization_metrics": { "summarization_evaluation_results": [ @@ -5926,10 +5949,36 @@ def test_generator_evaluations_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_generator_path(): +def test_app_path(): project = "squid" location = "clam" - generator = "whelk" + app = "whelk" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = GeneratorEvaluationsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "octopus", + "location": "oyster", + "app": "nudibranch", + } + path = GeneratorEvaluationsClient.app_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorEvaluationsClient.parse_app_path(path) + assert expected == actual + + +def test_generator_path(): + project = "cuttlefish" + location = "mussel" + generator = "winkle" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -5941,9 +5990,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "octopus", - "location": "oyster", - "generator": "nudibranch", + "project": "nautilus", + "location": "scallop", + "generator": "abalone", } path = GeneratorEvaluationsClient.generator_path(**expected) @@ -5953,10 +6002,10 @@ def test_parse_generator_path(): def test_generator_evaluation_path(): - project = "cuttlefish" - location = "mussel" - generator = "winkle" - evaluation = "nautilus" + project = "squid" + location = "clam" + generator = "whelk" + evaluation = "octopus" expected = "projects/{project}/locations/{location}/generators/{generator}/evaluations/{evaluation}".format( project=project, location=location, @@ -5971,10 +6020,10 @@ def test_generator_evaluation_path(): def test_parse_generator_evaluation_path(): expected = { - "project": "scallop", - "location": "abalone", - "generator": "squid", - "evaluation": "clam", + "project": "oyster", + "location": "nudibranch", + "generator": "cuttlefish", + "evaluation": "mussel", } path = GeneratorEvaluationsClient.generator_evaluation_path(**expected) @@ -5984,9 +6033,38 @@ def test_parse_generator_evaluation_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + app = "scallop" + tool = "abalone" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = GeneratorEvaluationsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": 
"squid", + "location": "clam", + "app": "whelk", + "tool": "octopus", + } + path = GeneratorEvaluationsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorEvaluationsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "oyster" + location = "nudibranch" + tool = "cuttlefish" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -5998,9 +6076,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "mussel", + "location": "winkle", + "tool": "nautilus", } path = GeneratorEvaluationsClient.tool_path(**expected) @@ -6009,8 +6087,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = GeneratorEvaluationsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = GeneratorEvaluationsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorEvaluationsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6020,7 +6129,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = GeneratorEvaluationsClient.common_billing_account_path(**expected) @@ -6030,7 +6139,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -6040,7 +6149,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = GeneratorEvaluationsClient.common_folder_path(**expected) @@ -6050,7 +6159,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -6060,7 +6169,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = GeneratorEvaluationsClient.common_organization_path(**expected) @@ -6070,7 +6179,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -6080,7 +6189,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = GeneratorEvaluationsClient.common_project_path(**expected) @@ -6090,8 +6199,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = 
"whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6102,8 +6211,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = GeneratorEvaluationsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py index bd8a0c9ee9fd..7955a57467c8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py @@ -70,8 +70,12 @@ ) from google.cloud.dialogflow_v2.types import ( agent_coaching_instruction, + ces_app, + ces_tool, generator, + tool, tool_call, + toolset, ) from google.cloud.dialogflow_v2.types import generator as gcd_generator @@ -4631,6 +4635,9 @@ def test_create_generator_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -4641,6 +4648,9 @@ def test_create_generator_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_toolset": "ces_toolset_value", + "ces_tool": "ces_tool_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4671,6 +4681,17 @@ def test_create_generator_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 0.21630000000000002, }, + "toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + "confirmation_requirement": 1, 
+ } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [{"ces_app": "ces_app_value", "confirmation_requirement": 1}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5336,6 +5357,9 @@ def test_update_generator_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -5346,6 +5370,9 @@ def test_update_generator_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_toolset": "ces_toolset_value", + "ces_tool": "ces_tool_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -5376,6 +5403,17 @@ def test_update_generator_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 0.21630000000000002, }, + "toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + "confirmation_requirement": 1, + } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [{"ces_app": "ces_app_value", "confirmation_requirement": 1}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6410,10 +6448,36 @@ def test_generators_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_generator_path(): +def test_app_path(): project = "squid" location = "clam" - generator = "whelk" + app = "whelk" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = GeneratorsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "octopus", + "location": "oyster", + "app": "nudibranch", + } + path = GeneratorsClient.app_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorsClient.parse_app_path(path) + assert expected == actual + + +def test_generator_path(): + project = "cuttlefish" + location = "mussel" + generator = "winkle" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -6425,9 +6489,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "octopus", - "location": "oyster", - "generator": "nudibranch", + "project": "nautilus", + "location": "scallop", + "generator": "abalone", } path = GeneratorsClient.generator_path(**expected) @@ -6437,9 +6501,38 @@ def test_parse_generator_path(): def test_tool_path(): - project = "cuttlefish" - location = "mussel" - tool = "winkle" + project = "squid" + location = "clam" + app = "whelk" + tool = "octopus" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = GeneratorsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "app": "cuttlefish", + "tool": "mussel", + 
} + path = GeneratorsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "winkle" + location = "nautilus" + tool = "scallop" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -6451,9 +6544,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nautilus", - "location": "scallop", - "tool": "abalone", + "project": "abalone", + "location": "squid", + "tool": "clam", } path = GeneratorsClient.tool_path(**expected) @@ -6462,8 +6555,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "whelk" + location = "octopus" + app = "oyster" + toolset = "nudibranch" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = GeneratorsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "app": "winkle", + "toolset": "nautilus", + } + path = GeneratorsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6473,7 +6597,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "abalone", } path = GeneratorsClient.common_billing_account_path(**expected) @@ -6483,7 +6607,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -6493,7 +6617,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "clam", } path = GeneratorsClient.common_folder_path(**expected) @@ -6503,7 +6627,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -6513,7 +6637,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "octopus", } path = GeneratorsClient.common_organization_path(**expected) @@ -6523,7 +6647,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -6533,7 +6657,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nudibranch", } path = GeneratorsClient.common_project_path(**expected) @@ -6543,8 +6667,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "cuttlefish" + location = "mussel" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -6555,8 +6679,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "winkle", + "location": "nautilus", } path = GeneratorsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py index 962ac7ab2177..58e9ee94e9ea 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py @@ -9518,10 +9518,36 @@ def test_parse_answer_record_path(): assert expected == actual -def test_context_path(): +def test_app_path(): project = "oyster" - session = "nudibranch" - context = "cuttlefish" + location = "nudibranch" + app = "cuttlefish" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = ParticipantsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "mussel", + "location": "winkle", + "app": "nautilus", + } + path = ParticipantsClient.app_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParticipantsClient.parse_app_path(path) + assert expected == actual + + +def test_context_path(): + project = "scallop" + session = "abalone" + context = "squid" expected = "projects/{project}/agent/sessions/{session}/contexts/{context}".format( project=project, session=session, @@ -9533,9 +9559,9 @@ def test_context_path(): def test_parse_context_path(): expected = { - "project": "mussel", - "session": "winkle", - "context": "nautilus", + "project": "clam", + "session": "whelk", + "context": "octopus", } path = ParticipantsClient.context_path(**expected) @@ -9545,8 +9571,8 @@ def test_parse_context_path(): def test_intent_path(): - project = "scallop" - intent = "abalone" + project = "oyster" + intent = "nudibranch" expected = "projects/{project}/agent/intents/{intent}".format( project=project, intent=intent, @@ -9557,8 +9583,8 @@ def test_intent_path(): def test_parse_intent_path(): expected = { - "project": "squid", - "intent": "clam", + "project": "cuttlefish", + "intent": "mussel", } path = ParticipantsClient.intent_path(**expected) @@ -9568,9 +9594,9 @@ def test_parse_intent_path(): def test_message_path(): - project = "whelk" - conversation = "octopus" - message = "oyster" + project = "winkle" + conversation = "nautilus" + message = "scallop" expected = ( "projects/{project}/conversations/{conversation}/messages/{message}".format( project=project, @@ -9584,9 +9610,9 @@ def test_message_path(): def test_parse_message_path(): expected = { - "project": "nudibranch", - "conversation": "cuttlefish", - "message": "mussel", + "project": "abalone", + "conversation": "squid", + "message": "clam", } path = ParticipantsClient.message_path(**expected) @@ -9596,9 +9622,9 @@ def test_parse_message_path(): def test_participant_path(): - project = "winkle" - conversation = "nautilus" - participant = "scallop" + project = "whelk" + conversation = "octopus" + participant = "oyster" expected = 
"projects/{project}/conversations/{conversation}/participants/{participant}".format( project=project, conversation=conversation, @@ -9610,9 +9636,9 @@ def test_participant_path(): def test_parse_participant_path(): expected = { - "project": "abalone", - "conversation": "squid", - "participant": "clam", + "project": "nudibranch", + "conversation": "cuttlefish", + "participant": "mussel", } path = ParticipantsClient.participant_path(**expected) @@ -9622,9 +9648,9 @@ def test_parse_participant_path(): def test_phrase_set_path(): - project = "whelk" - location = "octopus" - phrase_set = "oyster" + project = "winkle" + location = "nautilus" + phrase_set = "scallop" expected = "projects/{project}/locations/{location}/phraseSets/{phrase_set}".format( project=project, location=location, @@ -9636,9 +9662,9 @@ def test_phrase_set_path(): def test_parse_phrase_set_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "phrase_set": "mussel", + "project": "abalone", + "location": "squid", + "phrase_set": "clam", } path = ParticipantsClient.phrase_set_path(**expected) @@ -9648,9 +9674,9 @@ def test_parse_phrase_set_path(): def test_session_entity_type_path(): - project = "winkle" - session = "nautilus" - entity_type = "scallop" + project = "whelk" + session = "octopus" + entity_type = "oyster" expected = ( "projects/{project}/agent/sessions/{session}/entityTypes/{entity_type}".format( project=project, @@ -9664,9 +9690,9 @@ def test_session_entity_type_path(): def test_parse_session_entity_type_path(): expected = { - "project": "abalone", - "session": "squid", - "entity_type": "clam", + "project": "nudibranch", + "session": "cuttlefish", + "entity_type": "mussel", } path = ParticipantsClient.session_entity_type_path(**expected) @@ -9676,9 +9702,9 @@ def test_parse_session_entity_type_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + tool = "scallop" expected = 
"projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -9690,9 +9716,38 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "abalone", + "location": "squid", + "tool": "clam", + } + path = ParticipantsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = ParticipantsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "whelk" + location = "octopus" + app = "oyster" + tool = "nudibranch" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = ParticipantsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "app": "winkle", + "tool": "nautilus", } path = ParticipantsClient.tool_path(**expected) @@ -9701,8 +9756,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = ParticipantsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = ParticipantsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParticipantsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -9712,7 +9798,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = ParticipantsClient.common_billing_account_path(**expected) @@ -9722,7 +9808,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -9732,7 +9818,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = ParticipantsClient.common_folder_path(**expected) @@ -9742,7 +9828,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -9752,7 +9838,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = ParticipantsClient.common_organization_path(**expected) @@ -9762,7 +9848,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -9772,7 +9858,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = ParticipantsClient.common_project_path(**expected) @@ -9782,8 +9868,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "whelk" + location = "octopus" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -9794,8 +9880,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = ParticipantsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py index 521093b9d3b0..0ff9d6302f67 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py @@ -3815,6 +3815,9 @@ def test_update_answer_record_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -3825,6 +3828,9 @@ def test_update_answer_record_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4858,10 +4864,36 @@ def test_parse_answer_record_path(): assert expected == actual -def test_context_path(): +def test_app_path(): project = "oyster" - session = "nudibranch" - context = "cuttlefish" + location = "nudibranch" + app = "cuttlefish" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = AnswerRecordsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + 
"project": "mussel", + "location": "winkle", + "app": "nautilus", + } + path = AnswerRecordsClient.app_path(**expected) + + # Check that the path construction is reversible. + actual = AnswerRecordsClient.parse_app_path(path) + assert expected == actual + + +def test_context_path(): + project = "scallop" + session = "abalone" + context = "squid" expected = "projects/{project}/agent/sessions/{session}/contexts/{context}".format( project=project, session=session, @@ -4873,9 +4905,9 @@ def test_context_path(): def test_parse_context_path(): expected = { - "project": "mussel", - "session": "winkle", - "context": "nautilus", + "project": "clam", + "session": "whelk", + "context": "octopus", } path = AnswerRecordsClient.context_path(**expected) @@ -4885,9 +4917,9 @@ def test_parse_context_path(): def test_document_path(): - project = "scallop" - knowledge_base = "abalone" - document = "squid" + project = "oyster" + knowledge_base = "nudibranch" + document = "cuttlefish" expected = "projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}".format( project=project, knowledge_base=knowledge_base, @@ -4899,9 +4931,9 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "clam", - "knowledge_base": "whelk", - "document": "octopus", + "project": "mussel", + "knowledge_base": "winkle", + "document": "nautilus", } path = AnswerRecordsClient.document_path(**expected) @@ -4911,8 +4943,8 @@ def test_parse_document_path(): def test_intent_path(): - project = "oyster" - intent = "nudibranch" + project = "scallop" + intent = "abalone" expected = "projects/{project}/agent/intents/{intent}".format( project=project, intent=intent, @@ -4923,8 +4955,8 @@ def test_intent_path(): def test_parse_intent_path(): expected = { - "project": "cuttlefish", - "intent": "mussel", + "project": "squid", + "intent": "clam", } path = AnswerRecordsClient.intent_path(**expected) @@ -4934,9 +4966,38 @@ def test_parse_intent_path(): def test_tool_path(): - 
project = "winkle" - location = "nautilus" - tool = "scallop" + project = "whelk" + location = "octopus" + app = "oyster" + tool = "nudibranch" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = AnswerRecordsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "app": "winkle", + "tool": "nautilus", + } + path = AnswerRecordsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = AnswerRecordsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "scallop" + location = "abalone" + tool = "squid" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -4948,9 +5009,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "abalone", - "location": "squid", - "tool": "clam", + "project": "clam", + "location": "whelk", + "tool": "octopus", } path = AnswerRecordsClient.tool_path(**expected) @@ -4959,8 +5020,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "oyster" + location = "nudibranch" + app = "cuttlefish" + toolset = "mussel" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = AnswerRecordsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "app": "scallop", + "toolset": "abalone", + } + path = AnswerRecordsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnswerRecordsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4970,7 +5062,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "clam", } path = AnswerRecordsClient.common_billing_account_path(**expected) @@ -4980,7 +5072,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -4990,7 +5082,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "octopus", } path = AnswerRecordsClient.common_folder_path(**expected) @@ -5000,7 +5092,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -5010,7 +5102,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nudibranch", } path = AnswerRecordsClient.common_organization_path(**expected) @@ -5020,7 +5112,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -5030,7 +5122,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "mussel", } path = AnswerRecordsClient.common_project_path(**expected) @@ -5040,8 +5132,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "winkle" + location = 
"nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5052,8 +5144,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "scallop", + "location": "abalone", } path = AnswerRecordsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py index 5ab875a76843..0bc929fb9e1a 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py @@ -1971,6 +1971,7 @@ def test_get_conversation_profile(request_type, transport: str = "grpc"): call.return_value = conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -1987,6 +1988,7 @@ def test_get_conversation_profile(request_type, transport: str = "grpc"): assert isinstance(response, conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -2127,6 +2129,7 @@ async def test_get_conversation_profile_async( conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -2144,6 +2147,7 @@ async def 
test_get_conversation_profile_async( assert isinstance(response, conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -2330,6 +2334,7 @@ def test_create_conversation_profile(request_type, transport: str = "grpc"): call.return_value = gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -2346,6 +2351,7 @@ def test_create_conversation_profile(request_type, transport: str = "grpc"): assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -2486,6 +2492,7 @@ async def test_create_conversation_profile_async( gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -2503,6 +2510,7 @@ async def test_create_conversation_profile_async( assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ 
-2707,6 +2715,7 @@ def test_update_conversation_profile(request_type, transport: str = "grpc"): call.return_value = gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -2723,6 +2732,7 @@ def test_update_conversation_profile(request_type, transport: str = "grpc"): assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -2859,6 +2869,7 @@ async def test_update_conversation_profile_async( gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -2876,6 +2887,7 @@ async def test_update_conversation_profile_async( assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -5910,6 +5922,7 @@ async def test_get_conversation_profile_empty_call_grpc_asyncio(): conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -5943,6 +5956,7 @@ async def test_create_conversation_profile_empty_call_grpc_asyncio(): 
gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -5976,6 +5990,7 @@ async def test_update_conversation_profile_empty_call_grpc_asyncio(): gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -6265,6 +6280,7 @@ def test_get_conversation_profile_rest_call_success(request_type): return_value = conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -6286,6 +6302,7 @@ def test_get_conversation_profile_rest_call_success(request_type): assert isinstance(response, conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -6408,6 +6425,7 @@ def test_create_conversation_profile_rest_call_success(request_type): "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "use_bidi_streaming": True, "automated_agent_config": { "agent": "agent_value", "session_ttl": {"seconds": 751, "nanos": 543}, @@ -6475,6 +6493,7 @@ def test_create_conversation_profile_rest_call_success(request_type): "message_analysis_config": { "enable_entity_extraction": True, "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, }, }, "human_agent_handoff_config": { @@ -6600,6 +6619,7 @@ def 
get_message_fields(field): return_value = gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -6621,6 +6641,7 @@ def get_message_fields(field): assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert response.security_settings == "security_settings_value" @@ -6751,6 +6772,7 @@ def test_update_conversation_profile_rest_call_success(request_type): "display_name": "display_name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, + "use_bidi_streaming": True, "automated_agent_config": { "agent": "agent_value", "session_ttl": {"seconds": 751, "nanos": 543}, @@ -6818,6 +6840,7 @@ def test_update_conversation_profile_rest_call_success(request_type): "message_analysis_config": { "enable_entity_extraction": True, "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, }, }, "human_agent_handoff_config": { @@ -6943,6 +6966,7 @@ def get_message_fields(field): return_value = gcd_conversation_profile.ConversationProfile( name="name_value", display_name="display_name_value", + use_bidi_streaming=True, language_code="language_code_value", time_zone="time_zone_value", security_settings="security_settings_value", @@ -6964,6 +6988,7 @@ def get_message_fields(field): assert isinstance(response, gcd_conversation_profile.ConversationProfile) assert response.name == "name_value" assert response.display_name == "display_name_value" + assert response.use_bidi_streaming is True assert response.language_code == "language_code_value" assert response.time_zone == "time_zone_value" assert 
response.security_settings == "security_settings_value" diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py index bdaf51c03f25..911a14d6a905 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py @@ -71,12 +71,16 @@ from google.cloud.dialogflow_v2beta1.types import ( agent_coaching_instruction, audio_config, + ces_app, + ces_tool, conversation, conversation_profile, generator, participant, session, + tool, tool_call, + toolset, ) from google.cloud.dialogflow_v2beta1.types import conversation as gcd_conversation @@ -8742,7 +8746,133 @@ def test_create_conversation_rest_call_success(request_type): {"mime_type": "mime_type_value", "content": b"content_blob"} ], }, + "initial_conversation_profile": { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {}, + "update_time": {}, + "use_bidi_streaming": True, + "automated_agent_config": { + "agent": "agent_value", + "session_ttl": {"seconds": 751, "nanos": 543}, + }, + "human_agent_assistant_config": { + "notification_config": {"topic": "topic_value", "message_format": 1}, + "human_agent_suggestion_config": { + "feature_configs": [ + { + "suggestion_feature": {"type_": 1}, + "enable_event_based_suggestion": True, + "disable_agent_query_logging": True, + "enable_query_suggestion_when_no_answer": True, + "enable_conversation_augmented_query": True, + "enable_query_suggestion_only": True, + "enable_response_debug_info": True, + "rai_settings": { + "rai_category_configs": [ + {"category": 1, "sensitivity_level": 1} + ] + }, + "suggestion_trigger_settings": { + "no_small_talk": True, + "only_end_user": True, + }, + "query_config": { + "knowledge_base_query_source": { + "knowledge_bases": [ + 
"knowledge_bases_value1", + "knowledge_bases_value2", + ] + }, + "document_query_source": { + "documents": [ + "documents_value1", + "documents_value2", + ] + }, + "dialogflow_query_source": { + "agent": "agent_value", + "human_agent_side_config": {"agent": "agent_value"}, + }, + "max_results": 1207, + "confidence_threshold": 0.2106, + "context_filter_settings": { + "drop_handoff_messages": True, + "drop_virtual_agent_messages": True, + "drop_ivr_messages": True, + }, + "sections": {"section_types": [1]}, + "context_size": 1311, + }, + "conversation_model_config": { + "model": "model_value", + "baseline_model_version": "baseline_model_version_value", + }, + "conversation_process_config": { + "recent_sentences_count": 2352 + }, + } + ], + "group_suggestion_responses": True, + "generators": ["generators_value1", "generators_value2"], + "disable_high_latency_features_sync_delivery": True, + "skip_empty_event_based_suggestion": True, + "use_unredacted_conversation_data": True, + "enable_async_tool_call": True, + }, + "end_user_suggestion_config": {}, + "message_analysis_config": { + "enable_entity_extraction": True, + "enable_sentiment_analysis": True, + "enable_sentiment_analysis_v3": True, + }, + }, + "human_agent_handoff_config": { + "live_person_config": {"account_number": "account_number_value"}, + "salesforce_live_agent_config": { + "organization_id": "organization_id_value", + "deployment_id": "deployment_id_value", + "button_id": "button_id_value", + "endpoint_domain": "endpoint_domain_value", + }, + }, + "notification_config": {}, + "logging_config": {"enable_stackdriver_logging": True}, + "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, + "stt_config": { + "speech_model_variant": 1, + "model": "model_value", + "phrase_sets": ["phrase_sets_value1", "phrase_sets_value2"], + "audio_encoding": 1, + "sample_rate_hertz": 1817, + "language_code": "language_code_value", + "enable_word_info": True, + 
"use_timeout_based_endpointing": True, + }, + "language_code": "language_code_value", + "time_zone": "time_zone_value", + "security_settings": "security_settings_value", + "tts_config": { + "speaking_rate": 0.1373, + "pitch": 0.536, + "volume_gain_db": 0.1467, + "effects_profile_id": [ + "effects_profile_id_value1", + "effects_profile_id_value2", + ], + "voice": {"name": "name_value", "ssml_gender": 1}, + "pronunciations": [ + { + "phrase": "phrase_value", + "phonetic_encoding": 1, + "pronunciation": "pronunciation_value", + } + ], + }, + }, "ingested_context_references": {}, + "initial_generator_contexts": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -11527,9 +11657,35 @@ def test_parse_answer_record_path(): assert expected == actual -def test_conversation_path(): +def test_app_path(): project = "cuttlefish" - conversation = "mussel" + location = "mussel" + app = "winkle" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = ConversationsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "app": "abalone", + } + path = ConversationsClient.app_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConversationsClient.parse_app_path(path) + assert expected == actual + + +def test_conversation_path(): + project = "squid" + conversation = "clam" expected = "projects/{project}/conversations/{conversation}".format( project=project, conversation=conversation, @@ -11540,8 +11696,8 @@ def test_conversation_path(): def test_parse_conversation_path(): expected = { - "project": "winkle", - "conversation": "nautilus", + "project": "whelk", + "conversation": "octopus", } path = ConversationsClient.conversation_path(**expected) @@ -11551,9 +11707,9 @@ def test_parse_conversation_path(): def test_conversation_model_path(): - project = "scallop" - location = "abalone" - conversation_model = "squid" + project = "oyster" + location = "nudibranch" + conversation_model = "cuttlefish" expected = "projects/{project}/locations/{location}/conversationModels/{conversation_model}".format( project=project, location=location, @@ -11567,9 +11723,9 @@ def test_conversation_model_path(): def test_parse_conversation_model_path(): expected = { - "project": "clam", - "location": "whelk", - "conversation_model": "octopus", + "project": "mussel", + "location": "winkle", + "conversation_model": "nautilus", } path = ConversationsClient.conversation_model_path(**expected) @@ -11579,8 +11735,8 @@ def test_parse_conversation_model_path(): def test_conversation_profile_path(): - project = "oyster" - conversation_profile = "nudibranch" + project = "scallop" + conversation_profile = "abalone" expected = "projects/{project}/conversationProfiles/{conversation_profile}".format( project=project, conversation_profile=conversation_profile, @@ -11593,8 +11749,8 @@ def test_conversation_profile_path(): def test_parse_conversation_profile_path(): expected = { - "project": "cuttlefish", - "conversation_profile": "mussel", + "project": "squid", + "conversation_profile": "clam", } path = ConversationsClient.conversation_profile_path(**expected) @@ -11604,9 +11760,9 @@ def 
test_parse_conversation_profile_path(): def test_cx_security_settings_path(): - project = "winkle" - location = "nautilus" - security_settings = "scallop" + project = "whelk" + location = "octopus" + security_settings = "oyster" expected = "projects/{project}/locations/{location}/securitySettings/{security_settings}".format( project=project, location=location, @@ -11620,9 +11776,9 @@ def test_cx_security_settings_path(): def test_parse_cx_security_settings_path(): expected = { - "project": "abalone", - "location": "squid", - "security_settings": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "security_settings": "mussel", } path = ConversationsClient.cx_security_settings_path(**expected) @@ -11632,10 +11788,10 @@ def test_parse_cx_security_settings_path(): def test_data_store_path(): - project = "whelk" - location = "octopus" - collection = "oyster" - data_store = "nudibranch" + project = "winkle" + location = "nautilus" + collection = "scallop" + data_store = "abalone" expected = "projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}".format( project=project, location=location, @@ -11650,10 +11806,10 @@ def test_data_store_path(): def test_parse_data_store_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "collection": "winkle", - "data_store": "nautilus", + "project": "squid", + "location": "clam", + "collection": "whelk", + "data_store": "octopus", } path = ConversationsClient.data_store_path(**expected) @@ -11663,9 +11819,9 @@ def test_parse_data_store_path(): def test_document_path(): - project = "scallop" - knowledge_base = "abalone" - document = "squid" + project = "oyster" + knowledge_base = "nudibranch" + document = "cuttlefish" expected = "projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}".format( project=project, knowledge_base=knowledge_base, @@ -11677,9 +11833,9 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "clam", 
- "knowledge_base": "whelk", - "document": "octopus", + "project": "mussel", + "knowledge_base": "winkle", + "document": "nautilus", } path = ConversationsClient.document_path(**expected) @@ -11689,9 +11845,9 @@ def test_parse_document_path(): def test_generator_path(): - project = "oyster" - location = "nudibranch" - generator = "cuttlefish" + project = "scallop" + location = "abalone" + generator = "squid" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -11703,9 +11859,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "mussel", - "location": "winkle", - "generator": "nautilus", + "project": "clam", + "location": "whelk", + "generator": "octopus", } path = ConversationsClient.generator_path(**expected) @@ -11715,8 +11871,8 @@ def test_parse_generator_path(): def test_knowledge_base_path(): - project = "scallop" - knowledge_base = "abalone" + project = "oyster" + knowledge_base = "nudibranch" expected = "projects/{project}/knowledgeBases/{knowledge_base}".format( project=project, knowledge_base=knowledge_base, @@ -11727,8 +11883,8 @@ def test_knowledge_base_path(): def test_parse_knowledge_base_path(): expected = { - "project": "squid", - "knowledge_base": "clam", + "project": "cuttlefish", + "knowledge_base": "mussel", } path = ConversationsClient.knowledge_base_path(**expected) @@ -11738,9 +11894,9 @@ def test_parse_knowledge_base_path(): def test_message_path(): - project = "whelk" - conversation = "octopus" - message = "oyster" + project = "winkle" + conversation = "nautilus" + message = "scallop" expected = ( "projects/{project}/conversations/{conversation}/messages/{message}".format( project=project, @@ -11754,9 +11910,9 @@ def test_message_path(): def test_parse_message_path(): expected = { - "project": "nudibranch", - "conversation": "cuttlefish", - "message": "mussel", + "project": "abalone", + "conversation": "squid", + "message": "clam", } 
path = ConversationsClient.message_path(**expected) @@ -11766,9 +11922,9 @@ def test_parse_message_path(): def test_phrase_set_path(): - project = "winkle" - location = "nautilus" - phrase_set = "scallop" + project = "whelk" + location = "octopus" + phrase_set = "oyster" expected = "projects/{project}/locations/{location}/phraseSets/{phrase_set}".format( project=project, location=location, @@ -11780,9 +11936,9 @@ def test_phrase_set_path(): def test_parse_phrase_set_path(): expected = { - "project": "abalone", - "location": "squid", - "phrase_set": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "phrase_set": "mussel", } path = ConversationsClient.phrase_set_path(**expected) @@ -11792,9 +11948,38 @@ def test_parse_phrase_set_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + app = "scallop" + tool = "abalone" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = ConversationsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "squid", + "location": "clam", + "app": "whelk", + "tool": "octopus", + } + path = ConversationsClient.tool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConversationsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "oyster" + location = "nudibranch" + tool = "cuttlefish" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -11806,9 +11991,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "mussel", + "location": "winkle", + "tool": "nautilus", } path = ConversationsClient.tool_path(**expected) @@ -11817,8 +12002,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = ConversationsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = ConversationsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConversationsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -11828,7 +12044,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = ConversationsClient.common_billing_account_path(**expected) @@ -11838,7 +12054,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -11848,7 +12064,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = ConversationsClient.common_folder_path(**expected) @@ -11858,7 +12074,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -11868,7 +12084,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = ConversationsClient.common_organization_path(**expected) @@ -11878,7 +12094,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -11888,7 +12104,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = ConversationsClient.common_project_path(**expected) @@ -11898,8 +12114,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "whelk" + location = 
"octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -11910,8 +12126,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = ConversationsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generator_evaluations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generator_evaluations.py index b9aa48361e8b..2ed23b7459ed 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generator_evaluations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generator_evaluations.py @@ -75,10 +75,14 @@ ) from google.cloud.dialogflow_v2beta1.types import ( agent_coaching_instruction, + ces_app, + ces_tool, generator, generator_evaluation, operations, + tool, tool_call, + toolset, ) from google.cloud.dialogflow_v2beta1.types import ( generator_evaluation as gcd_generator_evaluation, @@ -4320,6 +4324,9 @@ def test_create_generator_evaluation_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -4330,6 +4337,9 @@ def test_create_generator_evaluation_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4360,6 +4370,19 @@ def test_create_generator_evaluation_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 
0.21630000000000002, }, + "toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + "confirmation_requirement": 1, + } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [ + {"ces_app": "ces_app_value", "confirmation_requirement": 1} + ], }, "summarization_metrics": { "summarization_evaluation_results": [ @@ -5926,10 +5949,36 @@ def test_generator_evaluations_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_generator_path(): +def test_app_path(): project = "squid" location = "clam" - generator = "whelk" + app = "whelk" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = GeneratorEvaluationsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "octopus", + "location": "oyster", + "app": "nudibranch", + } + path = GeneratorEvaluationsClient.app_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorEvaluationsClient.parse_app_path(path) + assert expected == actual + + +def test_generator_path(): + project = "cuttlefish" + location = "mussel" + generator = "winkle" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -5941,9 +5990,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "octopus", - "location": "oyster", - "generator": "nudibranch", + "project": "nautilus", + "location": "scallop", + "generator": "abalone", } path = GeneratorEvaluationsClient.generator_path(**expected) @@ -5953,10 +6002,10 @@ def test_parse_generator_path(): def test_generator_evaluation_path(): - project = "cuttlefish" - location = "mussel" - generator = "winkle" - evaluation = "nautilus" + project = "squid" + location = "clam" + generator = "whelk" + evaluation = "octopus" expected = "projects/{project}/locations/{location}/generators/{generator}/evaluations/{evaluation}".format( project=project, location=location, @@ -5971,10 +6020,10 @@ def test_generator_evaluation_path(): def test_parse_generator_evaluation_path(): expected = { - "project": "scallop", - "location": "abalone", - "generator": "squid", - "evaluation": "clam", + "project": "oyster", + "location": "nudibranch", + "generator": "cuttlefish", + "evaluation": "mussel", } path = GeneratorEvaluationsClient.generator_evaluation_path(**expected) @@ -5984,9 +6033,38 @@ def test_parse_generator_evaluation_path(): def test_tool_path(): - project = "whelk" - location = "octopus" - tool = "oyster" + project = "winkle" + location = "nautilus" + app = "scallop" + tool = "abalone" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = GeneratorEvaluationsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": 
"squid", + "location": "clam", + "app": "whelk", + "tool": "octopus", + } + path = GeneratorEvaluationsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorEvaluationsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "oyster" + location = "nudibranch" + tool = "cuttlefish" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -5998,9 +6076,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "tool": "mussel", + "project": "mussel", + "location": "winkle", + "tool": "nautilus", } path = GeneratorEvaluationsClient.tool_path(**expected) @@ -6009,8 +6087,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "scallop" + location = "abalone" + app = "squid" + toolset = "clam" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = GeneratorEvaluationsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "whelk", + "location": "octopus", + "app": "oyster", + "toolset": "nudibranch", + } + path = GeneratorEvaluationsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorEvaluationsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6020,7 +6129,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "mussel", } path = GeneratorEvaluationsClient.common_billing_account_path(**expected) @@ -6030,7 +6139,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -6040,7 +6149,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nautilus", } path = GeneratorEvaluationsClient.common_folder_path(**expected) @@ -6050,7 +6159,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -6060,7 +6169,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "abalone", } path = GeneratorEvaluationsClient.common_organization_path(**expected) @@ -6070,7 +6179,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -6080,7 +6189,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "clam", } path = GeneratorEvaluationsClient.common_project_path(**expected) @@ -6090,8 +6199,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = 
"whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -6102,8 +6211,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "oyster", + "location": "nudibranch", } path = GeneratorEvaluationsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py index 9ae10e043497..ec230d900618 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py @@ -70,8 +70,12 @@ ) from google.cloud.dialogflow_v2beta1.types import ( agent_coaching_instruction, + ces_app, + ces_tool, generator, + tool, tool_call, + toolset, ) from google.cloud.dialogflow_v2beta1.types import generator as gcd_generator @@ -4635,6 +4639,9 @@ def test_create_generator_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -4645,6 +4652,9 @@ def test_create_generator_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -4675,6 +4685,17 @@ def test_create_generator_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 0.21630000000000002, }, + "toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + 
"confirmation_requirement": 1, + } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [{"ces_app": "ces_app_value", "confirmation_requirement": 1}], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5340,6 +5361,9 @@ def test_update_generator_rest_call_success(request_type): { "tool_call": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "tool_display_name": "tool_display_name_value", "tool_display_details": "tool_display_details_value", "action": "action_value", @@ -5350,6 +5374,9 @@ def test_update_generator_rest_call_success(request_type): }, "tool_call_result": { "tool": "tool_value", + "ces_tool": "ces_tool_value", + "ces_toolset": "ces_toolset_value", + "ces_app": "ces_app_value", "action": "action_value", "error": {"message": "message_value"}, "raw_content": b"raw_content_blob", @@ -5380,6 +5407,17 @@ def test_update_generator_rest_call_success(request_type): "enable_deduping": True, "similarity_threshold": 0.21630000000000002, }, + "toolset_tools": [ + { + "toolset": "toolset_value", + "operation_id": "operation_id_value", + "confirmation_requirement": 1, + } + ], + "ces_tool_specs": [ + {"ces_tool": "ces_tool_value", "confirmation_requirement": 1} + ], + "ces_app_specs": [{"ces_app": "ces_app_value", "confirmation_requirement": 1}], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6414,10 +6452,36 @@ def test_generators_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_generator_path(): +def test_app_path(): project = "squid" location = "clam" - generator = "whelk" + app = "whelk" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = GeneratorsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "octopus", + "location": "oyster", + "app": "nudibranch", + } + path = GeneratorsClient.app_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorsClient.parse_app_path(path) + assert expected == actual + + +def test_generator_path(): + project = "cuttlefish" + location = "mussel" + generator = "winkle" expected = "projects/{project}/locations/{location}/generators/{generator}".format( project=project, location=location, @@ -6429,9 +6493,9 @@ def test_generator_path(): def test_parse_generator_path(): expected = { - "project": "octopus", - "location": "oyster", - "generator": "nudibranch", + "project": "nautilus", + "location": "scallop", + "generator": "abalone", } path = GeneratorsClient.generator_path(**expected) @@ -6441,9 +6505,38 @@ def test_parse_generator_path(): def test_tool_path(): - project = "cuttlefish" - location = "mussel" - tool = "winkle" + project = "squid" + location = "clam" + app = "whelk" + tool = "octopus" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = GeneratorsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "app": "cuttlefish", + "tool": "mussel", + 
} + path = GeneratorsClient.tool_path(**expected) + + # Check that the path construction is reversible. + actual = GeneratorsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "winkle" + location = "nautilus" + tool = "scallop" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -6455,9 +6548,9 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "nautilus", - "location": "scallop", - "tool": "abalone", + "project": "abalone", + "location": "squid", + "tool": "clam", } path = GeneratorsClient.tool_path(**expected) @@ -6466,8 +6559,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "whelk" + location = "octopus" + app = "oyster" + toolset = "nudibranch" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = GeneratorsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "app": "winkle", + "toolset": "nautilus", + } + path = GeneratorsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GeneratorsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6477,7 +6601,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "abalone", } path = GeneratorsClient.common_billing_account_path(**expected) @@ -6487,7 +6611,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -6497,7 +6621,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "clam", } path = GeneratorsClient.common_folder_path(**expected) @@ -6507,7 +6631,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -6517,7 +6641,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "octopus", } path = GeneratorsClient.common_organization_path(**expected) @@ -6527,7 +6651,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -6537,7 +6661,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nudibranch", } path = GeneratorsClient.common_project_path(**expected) @@ -6547,8 +6671,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "cuttlefish" + location = "mussel" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -6559,8 +6683,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "winkle", + "location": "nautilus", } path = GeneratorsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py index 856c7716eec0..f75afdb95b0e 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py @@ -10970,10 +10970,36 @@ def test_parse_answer_record_path(): assert expected == actual -def test_context_path(): +def test_app_path(): project = "oyster" - session = "nudibranch" - context = "cuttlefish" + location = "nudibranch" + app = "cuttlefish" + expected = "projects/{project}/locations/{location}/apps/{app}".format( + project=project, + location=location, + app=app, + ) + actual = ParticipantsClient.app_path(project, location, app) + assert expected == actual + + +def test_parse_app_path(): + expected = { + "project": "mussel", + "location": "winkle", + "app": "nautilus", + } + path = ParticipantsClient.app_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParticipantsClient.parse_app_path(path) + assert expected == actual + + +def test_context_path(): + project = "scallop" + session = "abalone" + context = "squid" expected = "projects/{project}/agent/sessions/{session}/contexts/{context}".format( project=project, session=session, @@ -10985,9 +11011,9 @@ def test_context_path(): def test_parse_context_path(): expected = { - "project": "mussel", - "session": "winkle", - "context": "nautilus", + "project": "clam", + "session": "whelk", + "context": "octopus", } path = ParticipantsClient.context_path(**expected) @@ -10997,9 +11023,9 @@ def test_parse_context_path(): def test_document_path(): - project = "scallop" - knowledge_base = "abalone" - document = "squid" + project = "oyster" + knowledge_base = "nudibranch" + document = "cuttlefish" expected = "projects/{project}/knowledgeBases/{knowledge_base}/documents/{document}".format( project=project, knowledge_base=knowledge_base, @@ -11011,9 +11037,9 @@ def test_document_path(): def test_parse_document_path(): expected = { - "project": "clam", - "knowledge_base": "whelk", - "document": "octopus", + "project": "mussel", + "knowledge_base": "winkle", + "document": "nautilus", } path = ParticipantsClient.document_path(**expected) @@ -11023,8 +11049,8 @@ def test_parse_document_path(): def test_intent_path(): - project = "oyster" - intent = "nudibranch" + project = "scallop" + intent = "abalone" expected = "projects/{project}/agent/intents/{intent}".format( project=project, intent=intent, @@ -11035,8 +11061,8 @@ def test_intent_path(): def test_parse_intent_path(): expected = { - "project": "cuttlefish", - "intent": "mussel", + "project": "squid", + "intent": "clam", } path = ParticipantsClient.intent_path(**expected) @@ -11046,9 +11072,9 @@ def test_parse_intent_path(): def test_message_path(): - project = "winkle" - conversation = "nautilus" - message = "scallop" + project = "whelk" + conversation = "octopus" + message = "oyster" expected = ( 
"projects/{project}/conversations/{conversation}/messages/{message}".format( project=project, @@ -11062,9 +11088,9 @@ def test_message_path(): def test_parse_message_path(): expected = { - "project": "abalone", - "conversation": "squid", - "message": "clam", + "project": "nudibranch", + "conversation": "cuttlefish", + "message": "mussel", } path = ParticipantsClient.message_path(**expected) @@ -11074,9 +11100,9 @@ def test_parse_message_path(): def test_participant_path(): - project = "whelk" - conversation = "octopus" - participant = "oyster" + project = "winkle" + conversation = "nautilus" + participant = "scallop" expected = "projects/{project}/conversations/{conversation}/participants/{participant}".format( project=project, conversation=conversation, @@ -11088,9 +11114,9 @@ def test_participant_path(): def test_parse_participant_path(): expected = { - "project": "nudibranch", - "conversation": "cuttlefish", - "participant": "mussel", + "project": "abalone", + "conversation": "squid", + "participant": "clam", } path = ParticipantsClient.participant_path(**expected) @@ -11100,9 +11126,9 @@ def test_parse_participant_path(): def test_phrase_set_path(): - project = "winkle" - location = "nautilus" - phrase_set = "scallop" + project = "whelk" + location = "octopus" + phrase_set = "oyster" expected = "projects/{project}/locations/{location}/phraseSets/{phrase_set}".format( project=project, location=location, @@ -11114,9 +11140,9 @@ def test_phrase_set_path(): def test_parse_phrase_set_path(): expected = { - "project": "abalone", - "location": "squid", - "phrase_set": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "phrase_set": "mussel", } path = ParticipantsClient.phrase_set_path(**expected) @@ -11126,9 +11152,9 @@ def test_parse_phrase_set_path(): def test_session_entity_type_path(): - project = "whelk" - session = "octopus" - entity_type = "oyster" + project = "winkle" + session = "nautilus" + entity_type = "scallop" expected = ( 
"projects/{project}/agent/sessions/{session}/entityTypes/{entity_type}".format( project=project, @@ -11142,9 +11168,9 @@ def test_session_entity_type_path(): def test_parse_session_entity_type_path(): expected = { - "project": "nudibranch", - "session": "cuttlefish", - "entity_type": "mussel", + "project": "abalone", + "session": "squid", + "entity_type": "clam", } path = ParticipantsClient.session_entity_type_path(**expected) @@ -11154,9 +11180,9 @@ def test_parse_session_entity_type_path(): def test_tool_path(): - project = "winkle" - location = "nautilus" - tool = "scallop" + project = "whelk" + location = "octopus" + tool = "oyster" expected = "projects/{project}/locations/{location}/tools/{tool}".format( project=project, location=location, @@ -11168,9 +11194,38 @@ def test_tool_path(): def test_parse_tool_path(): expected = { - "project": "abalone", - "location": "squid", - "tool": "clam", + "project": "nudibranch", + "location": "cuttlefish", + "tool": "mussel", + } + path = ParticipantsClient.tool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParticipantsClient.parse_tool_path(path) + assert expected == actual + + +def test_tool_path(): + project = "winkle" + location = "nautilus" + app = "scallop" + tool = "abalone" + expected = "projects/{project}/locations/{location}/apps/{app}/tools/{tool}".format( + project=project, + location=location, + app=app, + tool=tool, + ) + actual = ParticipantsClient.tool_path(project, location, app, tool) + assert expected == actual + + +def test_parse_tool_path(): + expected = { + "project": "squid", + "location": "clam", + "app": "whelk", + "tool": "octopus", } path = ParticipantsClient.tool_path(**expected) @@ -11179,8 +11234,39 @@ def test_parse_tool_path(): assert expected == actual +def test_toolset_path(): + project = "oyster" + location = "nudibranch" + app = "cuttlefish" + toolset = "mussel" + expected = ( + "projects/{project}/locations/{location}/apps/{app}/toolsets/{toolset}".format( + project=project, + location=location, + app=app, + toolset=toolset, + ) + ) + actual = ParticipantsClient.toolset_path(project, location, app, toolset) + assert expected == actual + + +def test_parse_toolset_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "app": "scallop", + "toolset": "abalone", + } + path = ParticipantsClient.toolset_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ParticipantsClient.parse_toolset_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -11190,7 +11276,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "clam", } path = ParticipantsClient.common_billing_account_path(**expected) @@ -11200,7 +11286,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -11210,7 +11296,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "octopus", } path = ParticipantsClient.common_folder_path(**expected) @@ -11220,7 +11306,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -11230,7 +11316,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nudibranch", } path = ParticipantsClient.common_organization_path(**expected) @@ -11240,7 +11326,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -11250,7 +11336,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "mussel", } path = ParticipantsClient.common_project_path(**expected) @@ -11260,8 +11346,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "winkle" + location 
= "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -11272,8 +11358,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "scallop", + "location": "abalone", } path = ParticipantsClient.common_location_path(**expected) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py index e797629dbffb..6e4419232527 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py @@ -45,6 +45,7 @@ import google.auth import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore from google.api_core import ( client_options, gapic_v1, @@ -4249,6 +4250,11 @@ def test_update_phone_number_rest_call_success(request_type): "phone_number": "phone_number_value", "conversation_profile": "conversation_profile_value", "lifecycle_state": 1, + "allowed_sip_trunks": { + "sip_trunks": ["sip_trunks_value1", "sip_trunks_value2"], + "carrier_ids": ["carrier_ids_value1", "carrier_ids_value2"], + }, + "purge_time": {"seconds": 751, "nanos": 543}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5568,8 +5574,34 @@ def test_parse_phone_number_path(): assert expected == actual +def test_sip_trunk_path(): + project = "oyster" + location = "nudibranch" + siptrunk = "cuttlefish" + expected = "projects/{project}/locations/{location}/sipTrunks/{siptrunk}".format( + project=project, + location=location, + siptrunk=siptrunk, + ) + actual = PhoneNumbersClient.sip_trunk_path(project, location, siptrunk) + assert expected == actual + + +def test_parse_sip_trunk_path(): + expected = { + "project": "mussel", + "location": "winkle", + "siptrunk": "nautilus", + } + path = PhoneNumbersClient.sip_trunk_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_sip_trunk_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5579,7 +5611,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "abalone", } path = PhoneNumbersClient.common_billing_account_path(**expected) @@ -5589,7 +5621,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -5599,7 +5631,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "clam", } path = PhoneNumbersClient.common_folder_path(**expected) @@ -5609,7 +5641,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -5619,7 +5651,7 @@ def test_common_organization_path(): def 
test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "octopus", } path = PhoneNumbersClient.common_organization_path(**expected) @@ -5629,7 +5661,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -5639,7 +5671,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nudibranch", } path = PhoneNumbersClient.common_project_path(**expected) @@ -5649,8 +5681,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5661,8 +5693,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "winkle", + "location": "nautilus", } path = PhoneNumbersClient.common_location_path(**expected) diff --git a/packages/google-cloud-discoveryengine/.repo-metadata.json b/packages/google-cloud-discoveryengine/.repo-metadata.json index 5e5c57d5f432..0b1bed837a4d 100644 --- a/packages/google-cloud-discoveryengine/.repo-metadata.json +++ b/packages/google-cloud-discoveryengine/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "discoveryengine.googleapis.com", - "api_shortname": "discoveryengine", - "client_documentation": "https://cloud.google.com/python/docs/reference/discoveryengine/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-discoveryengine", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "discoveryengine", - "name_pretty": "Discovery Engine API", - "product_documentation": 
"https://cloud.google.com/discovery-engine/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Discovery Engine API.", + "api_id": "discoveryengine.googleapis.com", + "api_shortname": "discoveryengine", + "client_documentation": "https://cloud.google.com/python/docs/reference/discoveryengine/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-discoveryengine", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "discoveryengine", + "name_pretty": "Discovery Engine API", + "product_documentation": "https://cloud.google.com/discovery-engine/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-discoveryengine/README.rst b/packages/google-cloud-discoveryengine/README.rst index dc297230d6eb..718914e2a512 100644 --- a/packages/google-cloud-discoveryengine/README.rst +++ b/packages/google-cloud-discoveryengine/README.rst @@ -3,7 +3,7 @@ Python Client for Discovery Engine API |preview| |pypi| |versions| -`Discovery Engine API`_: +`Discovery Engine API`_: Discovery Engine API. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-discoveryengine/docs/README.rst b/packages/google-cloud-discoveryengine/docs/README.rst index dc297230d6eb..718914e2a512 100644 --- a/packages/google-cloud-discoveryengine/docs/README.rst +++ b/packages/google-cloud-discoveryengine/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Discovery Engine API |preview| |pypi| |versions| -`Discovery Engine API`_: +`Discovery Engine API`_: Discovery Engine API. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-dlp/.repo-metadata.json b/packages/google-cloud-dlp/.repo-metadata.json index e421c8afe3e2..4e9fe97a2ce6 100644 --- a/packages/google-cloud-dlp/.repo-metadata.json +++ b/packages/google-cloud-dlp/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams, like text blocks and images.", - "api_id": "dlp.googleapis.com", - "api_shortname": "dlp", - "client_documentation": "https://cloud.google.com/python/docs/reference/dlp/latest", - "default_version": "v2", - "distribution_name": "google-cloud-dlp", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "dlp", - "name_pretty": "Cloud Data Loss Prevention", - "product_documentation": "https://cloud.google.com/dlp/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams, like text blocks and images.", + "api_id": "dlp.googleapis.com", + "api_shortname": "dlp", + "client_documentation": "https://cloud.google.com/python/docs/reference/dlp/latest", + "default_version": "v2", + "distribution_name": "google-cloud-dlp", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "dlp", + "name_pretty": "Cloud Data Loss Prevention", + "product_documentation": "https://cloud.google.com/dlp/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py index 4fa193fcb08c..9317e8861508 100644 --- 
a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py @@ -63,6 +63,7 @@ Container, ContentItem, ContentLocation, + ContentMetadata, ContentOption, CreateConnectionRequest, CreateDeidentifyTemplateRequest, @@ -193,6 +194,7 @@ InspectTemplate, JobTrigger, KeyValueMetadataLabel, + KeyValueMetadataProperty, KmsWrappedCryptoKey, LargeCustomDictionaryConfig, LargeCustomDictionaryStats, @@ -381,6 +383,7 @@ "Container", "ContentItem", "ContentLocation", + "ContentMetadata", "CreateConnectionRequest", "CreateDeidentifyTemplateRequest", "CreateDiscoveryConfigRequest", @@ -506,6 +509,7 @@ "InspectTemplate", "JobTrigger", "KeyValueMetadataLabel", + "KeyValueMetadataProperty", "KmsWrappedCryptoKey", "LargeCustomDictionaryConfig", "LargeCustomDictionaryStats", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py index 752d58fdad89..91f7bc0c26b8 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py @@ -73,6 +73,7 @@ Container, ContentItem, ContentLocation, + ContentMetadata, ContentOption, CreateConnectionRequest, CreateDeidentifyTemplateRequest, @@ -203,6 +204,7 @@ InspectTemplate, JobTrigger, KeyValueMetadataLabel, + KeyValueMetadataProperty, KmsWrappedCryptoKey, LargeCustomDictionaryConfig, LargeCustomDictionaryStats, @@ -497,6 +499,7 @@ def _get_version(dependency_name): "Container", "ContentItem", "ContentLocation", + "ContentMetadata", "ContentOption", "CreateConnectionRequest", "CreateDeidentifyTemplateRequest", @@ -637,6 +640,7 @@ def _get_version(dependency_name): "JobTrigger", "Key", "KeyValueMetadataLabel", + "KeyValueMetadataProperty", "KindExpression", "KmsWrappedCryptoKey", "LargeCustomDictionaryConfig", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py 
index e932ddc37626..58af7da344d8 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py @@ -56,6 +56,7 @@ Container, ContentItem, ContentLocation, + ContentMetadata, ContentOption, CreateConnectionRequest, CreateDeidentifyTemplateRequest, @@ -186,6 +187,7 @@ InspectTemplate, JobTrigger, KeyValueMetadataLabel, + KeyValueMetadataProperty, KmsWrappedCryptoKey, LargeCustomDictionaryConfig, LargeCustomDictionaryStats, @@ -372,6 +374,7 @@ "Container", "ContentItem", "ContentLocation", + "ContentMetadata", "CreateConnectionRequest", "CreateDeidentifyTemplateRequest", "CreateDiscoveryConfigRequest", @@ -497,6 +500,7 @@ "InspectTemplate", "JobTrigger", "KeyValueMetadataLabel", + "KeyValueMetadataProperty", "KmsWrappedCryptoKey", "LargeCustomDictionaryConfig", "LargeCustomDictionaryStats", diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py index d3a32195b1af..fbd6907f7bb2 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -65,7 +65,9 @@ "InspectConfig", "ByteContentItem", "ContentItem", + "ContentMetadata", "Table", + "KeyValueMetadataProperty", "InspectResult", "Finding", "Location", @@ -684,11 +686,14 @@ class MetadataType(proto.Enum): Storage. CONTENT_METADATA (3): Metadata extracted from the files. + CLIENT_PROVIDED_METADATA (4): + Metadata provided by the client. """ METADATATYPE_UNSPECIFIED = 0 STORAGE_METADATA = 2 CONTENT_METADATA = 3 + CLIENT_PROVIDED_METADATA = 4 class InfoTypeSupportedBy(proto.Enum): @@ -1630,6 +1635,8 @@ class ContentItem(proto.Message): ``data``. This field is a member of `oneof`_ ``data_item``. + content_metadata (google.cloud.dlp_v2.types.ContentMetadata): + User provided metadata for the content. 
""" value: str = proto.Field( @@ -1649,6 +1656,27 @@ class ContentItem(proto.Message): oneof="data_item", message="ByteContentItem", ) + content_metadata: "ContentMetadata" = proto.Field( + proto.MESSAGE, + number=6, + message="ContentMetadata", + ) + + +class ContentMetadata(proto.Message): + r"""Metadata on content to be scanned. + + Attributes: + properties (MutableSequence[google.cloud.dlp_v2.types.KeyValueMetadataProperty]): + User provided key-value pairs of content + metadata. + """ + + properties: MutableSequence["KeyValueMetadataProperty"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="KeyValueMetadataProperty", + ) class Table(proto.Message): @@ -1690,6 +1718,26 @@ class Row(proto.Message): ) +class KeyValueMetadataProperty(proto.Message): + r"""A key-value pair in the Metadata. + + Attributes: + key (str): + The key of the property. + value (str): + The value of the property. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class InspectResult(proto.Message): r"""All the findings for a single scanned item. diff --git a/packages/google-cloud-dms/.repo-metadata.json b/packages/google-cloud-dms/.repo-metadata.json index 5d92975a3a4f..50039f316b19 100644 --- a/packages/google-cloud-dms/.repo-metadata.json +++ b/packages/google-cloud-dms/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "makes it easier for you to migrate your data to Google Cloud. 
This service helps you lift and shift your MySQL and PostgreSQL workloads into Cloud SQL.", - "api_id": "datamigration.googleapis.com", - "api_shortname": "datamigration", - "client_documentation": "https://cloud.google.com/python/docs/reference/datamigration/latest", - "default_version": "v1", - "distribution_name": "google-cloud-dms", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "datamigration", - "name_pretty": "Cloud Database Migration Service", - "product_documentation": "https://cloud.google.com/database-migration/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "makes it easier for you to migrate your data to Google Cloud. This service helps you lift and shift your MySQL and PostgreSQL workloads into Cloud SQL.", + "api_id": "datamigration.googleapis.com", + "api_shortname": "datamigration", + "client_documentation": "https://cloud.google.com/python/docs/reference/datamigration/latest", + "default_version": "v1", + "distribution_name": "google-cloud-dms", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "datamigration", + "name_pretty": "Cloud Database Migration Service", + "product_documentation": "https://cloud.google.com/database-migration/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-dns/.repo-metadata.json b/packages/google-cloud-dns/.repo-metadata.json index 68c12afd7dd3..3bc0184c2a3f 100644 --- a/packages/google-cloud-dns/.repo-metadata.json +++ b/packages/google-cloud-dns/.repo-metadata.json @@ -1,17 +1,13 @@ { - "name": "dns", - "name_pretty": "Cloud DNS", - "product_documentation": "https://cloud.google.com/dns", + "api_shortname": "dns", "client_documentation": "https://cloud.google.com/python/docs/reference/dns/latest", + "distribution_name": "google-cloud-dns", "issue_tracker": "https://issuetracker.google.com/savedsearches/559772", - 
"release_level": "preview", "language": "python", "library_type": "REST", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-dns", - "requires_billing": true, - "default_version": "", - "codeowner_team": "", - "api_shortname": "dns", - "api_description": "provides methods that you can use to manage DNS for your applications." -} + "name": "dns", + "name_pretty": "Cloud DNS", + "product_documentation": "https://cloud.google.com/dns", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-dns/docs/README.rst b/packages/google-cloud-dns/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-dns/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-dns/docs/README.rst b/packages/google-cloud-dns/docs/README.rst new file mode 100644 index 000000000000..ca28886a43d7 --- /dev/null +++ b/packages/google-cloud-dns/docs/README.rst @@ -0,0 +1,109 @@ +Python Client for Cloud DNS API +=============================== + +|preview| |pypi| |versions| + +`Cloud DNS API`_: provides methods that you can use to manage DNS for your applications. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dns.svg + :target: https://pypi.org/project/google-cloud-dns/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dns.svg + :target: https://pypi.org/project/google-cloud-dns/ +.. _Cloud DNS API: https://cloud.google.com/dns +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dns/latest +.. 
_Product Documentation: https://cloud.google.com/dns + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud DNS API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud DNS API.: https://cloud.google.com/dns +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in `python-docs-samples`_ repository. + +.. _python-docs-samples: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/dns/api + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.9 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.8 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. 
_end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-dns + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-dns + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud DNS API + to see other available methods on the client. +- Read the `Cloud DNS API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud DNS API Product documentation: https://cloud.google.com/dns +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-dns/docs/conf.py b/packages/google-cloud-dns/docs/conf.py index dbb06d613380..58173c9879a9 100644 --- a/packages/google-cloud-dns/docs/conf.py +++ b/packages/google-cloud-dns/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/packages/google-cloud-dns/google/cloud/dns/__init__.py b/packages/google-cloud-dns/google/cloud/dns/__init__.py index d26ba7093581..8b3d7ff4fd86 100644 --- a/packages/google-cloud-dns/google/cloud/dns/__init__.py +++ b/packages/google-cloud-dns/google/cloud/dns/__init__.py @@ -24,12 +24,10 @@ (adding/deleting resource record sets) to a zone. 
""" -from google.cloud.dns.version import __version__ -from google.cloud.dns.zone import Changes from google.cloud.dns.client import Client -from google.cloud.dns.zone import ManagedZone from google.cloud.dns.resource_record_set import ResourceRecordSet - +from google.cloud.dns.version import __version__ +from google.cloud.dns.zone import Changes, ManagedZone SCOPE = Client.SCOPE diff --git a/packages/google-cloud-dns/google/cloud/dns/_http.py b/packages/google-cloud-dns/google/cloud/dns/_http.py index 51f3f5634af7..5f1b34323e3f 100644 --- a/packages/google-cloud-dns/google/cloud/dns/_http.py +++ b/packages/google-cloud-dns/google/cloud/dns/_http.py @@ -15,7 +15,6 @@ """Create / interact with Google Cloud DNS connections.""" from google.cloud import _http - from google.cloud.dns import __version__ diff --git a/packages/google-cloud-dns/google/cloud/dns/changes.py b/packages/google-cloud-dns/google/cloud/dns/changes.py index aaf03b683958..33fba642c0d9 100644 --- a/packages/google-cloud-dns/google/cloud/dns/changes.py +++ b/packages/google-cloud-dns/google/cloud/dns/changes.py @@ -16,6 +16,7 @@ from google.cloud._helpers import _rfc3339_to_datetime from google.cloud.exceptions import NotFound + from google.cloud.dns.resource_record_set import ResourceRecordSet diff --git a/packages/google-cloud-dns/google/cloud/dns/client.py b/packages/google-cloud-dns/google/cloud/dns/client.py index 780849d70377..66af7c5f3714 100644 --- a/packages/google-cloud-dns/google/cloud/dns/client.py +++ b/packages/google-cloud-dns/google/cloud/dns/client.py @@ -14,8 +14,8 @@ """Client for interacting with the Google Cloud DNS API.""" -from google.api_core import page_iterator from google.api_core import client_options as client_options_mod +from google.api_core import page_iterator from google.cloud.client import ClientWithProject from google.cloud.dns._http import Connection diff --git a/packages/google-cloud-dns/google/cloud/dns/zone.py b/packages/google-cloud-dns/google/cloud/dns/zone.py 
index 71ab81cc7c54..01bc201f1015 100644 --- a/packages/google-cloud-dns/google/cloud/dns/zone.py +++ b/packages/google-cloud-dns/google/cloud/dns/zone.py @@ -17,6 +17,7 @@ from google.api_core import page_iterator from google.cloud._helpers import _rfc3339_to_datetime from google.cloud.exceptions import NotFound + from google.cloud.dns.changes import Changes from google.cloud.dns.resource_record_set import ResourceRecordSet diff --git a/packages/google-cloud-dns/setup.py b/packages/google-cloud-dns/setup.py index 2e95c50fa9c5..6fe498954f26 100644 --- a/packages/google-cloud-dns/setup.py +++ b/packages/google-cloud-dns/setup.py @@ -18,7 +18,6 @@ import setuptools - # Package metadata. name = "google-cloud-dns" diff --git a/packages/google-cloud-dns/tests/unit/test__http.py b/packages/google-cloud-dns/tests/unit/test__http.py index 3da656d63916..aac2ea19f1ea 100644 --- a/packages/google-cloud-dns/tests/unit/test__http.py +++ b/packages/google-cloud-dns/tests/unit/test__http.py @@ -28,8 +28,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_build_api_url_no_extra_query_params(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit conn = self._make_one(object()) uri = conn.build_api_url("/foo") @@ -42,8 +41,7 @@ def test_build_api_url_no_extra_query_params(self): self.assertEqual(parms, {}) def test_build_api_url_w_custom_endpoint(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit custom_endpoint = "https://foo-dns.googleapis.com" conn = self._make_one(object(), api_endpoint=custom_endpoint) @@ -57,8 +55,7 @@ def test_build_api_url_w_custom_endpoint(self): self.assertEqual(parms, {}) def test_build_api_url_w_extra_query_params(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit conn = self._make_one(object()) 
uri = conn.build_api_url("/foo", {"bar": "baz"}) @@ -70,6 +67,7 @@ def test_build_api_url_w_extra_query_params(self): def test_extra_headers(self): import requests + from google.cloud import _http as base_http http = mock.create_autospec(requests.Session, instance=True) diff --git a/packages/google-cloud-dns/tests/unit/test_changes.py b/packages/google-cloud-dns/tests/unit/test_changes.py index 617110889b4f..05020042b564 100644 --- a/packages/google-cloud-dns/tests/unit/test_changes.py +++ b/packages/google-cloud-dns/tests/unit/test_changes.py @@ -30,8 +30,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def _setUpConstants(self): - from google.cloud._helpers import UTC - from google.cloud._helpers import _NOW + from google.cloud._helpers import _NOW, UTC self.WHEN = _NOW().replace(tzinfo=UTC) diff --git a/packages/google-cloud-dns/tests/unit/test_client.py b/packages/google-cloud-dns/tests/unit/test_client.py index 19829e0f21eb..aa0a72b81694 100644 --- a/packages/google-cloud-dns/tests/unit/test_client.py +++ b/packages/google-cloud-dns/tests/unit/test_client.py @@ -38,6 +38,7 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): from google.api_core.client_info import ClientInfo + from google.cloud.dns._http import Connection creds = _make_credentials() @@ -53,6 +54,7 @@ def test_ctor_defaults(self): def test_ctor_w_client_info(self): from google.api_core.client_info import ClientInfo + from google.cloud.dns._http import Connection client_info = ClientInfo() @@ -70,6 +72,7 @@ def test_ctor_w_client_info(self): def test_ctor_w_empty_client_options_object(self): from google.api_core.client_info import ClientInfo from google.api_core.client_options import ClientOptions + from google.cloud.dns._http import Connection creds = _make_credentials() diff --git a/packages/google-cloud-dns/tests/unit/test_zone.py b/packages/google-cloud-dns/tests/unit/test_zone.py index 2b240bb1b762..68cf12deadce 100644 --- 
a/packages/google-cloud-dns/tests/unit/test_zone.py +++ b/packages/google-cloud-dns/tests/unit/test_zone.py @@ -32,6 +32,7 @@ def _make_one(self, *args, **kw): def _setUpConstants(self): import datetime + from google.cloud._helpers import UTC year = 2015 diff --git a/packages/google-cloud-documentai-toolbox/.repo-metadata.json b/packages/google-cloud-documentai-toolbox/.repo-metadata.json index 24dec4050e28..a4ab0d600ae7 100644 --- a/packages/google-cloud-documentai-toolbox/.repo-metadata.json +++ b/packages/google-cloud-documentai-toolbox/.repo-metadata.json @@ -1,14 +1,12 @@ { - "name": "documentai-toolbox", - "name_pretty": "Document AI Toolbox", - "issue_tracker": "https://github.com/googleapis/python-documentai-toolbox/issues", "client_documentation": "https://cloud.google.com/python/docs/reference/documentai-toolbox/latest", - "release_level": "preview", + "default_version": "v1", + "distribution_name": "google-cloud-documentai-toolbox", + "issue_tracker": "https://github.com/googleapis/python-documentai-toolbox/issues", "language": "python", "library_type": "OTHER", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-documentai-toolbox", - "requires_billing": true, - "default_version": "v1", - "codeowner_team": "@googleapis/cdpe-cloudai" -} + "name": "documentai-toolbox", + "name_pretty": "Document AI Toolbox", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-documentai-toolbox/docs/README.rst b/packages/google-cloud-documentai-toolbox/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-documentai-toolbox/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-documentai-toolbox/docs/README.rst b/packages/google-cloud-documentai-toolbox/docs/README.rst new file mode 100644 index 000000000000..8ea93c665413 --- /dev/null +++ 
b/packages/google-cloud-documentai-toolbox/docs/README.rst @@ -0,0 +1,114 @@ +Document AI Toolbox +================================= + +|experimental| |versions| + +`Document AI Toolbox`_: Document AI Toolbox aims to reduce the friction of managing, manipulating, and extracting information from outputs of Document AI ``BatchProcessDocuments()`` (JSON files written to user-managed GCS buckets) and ``ProcessDocument()`` methods, programmatically. + +**Disclaimer** + +The Document AI Toolbox is in an experimental state. This library is a work-in-progress and is likely to have backwards-incompatible changes. Users of the toolbox might need to rewrite their code when upgrading the toolbox version. + +.. |experimental| image:: https://img.shields.io/badge/support-experimental-red.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-documentai-toolbox.svg + :target: https://pypi.org/project/google-cloud-documentai-toolbox/ + + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +Enabling Document AI may not be technically required to use this library, unless ``ProcessDocument()`` is intended to be used. +But you must enable the Storage API to access GCS buckets that contains the output of ``BatchProcessDocument()``. +You will need to enable the BigQuery API if you intend to use the BigQuery features. + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Storage API.`_ +4. `Enable the Document AI API.`_ +5. `Enable the BigQuery API.`_ +6. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Enable the Storage API.: https://cloud.google.com/storage/docs +.. _Enable the Document AI API.: https://cloud.google.com/document-ai/docs +.. _Enable the BigQuery API.: https://cloud.google.com/bigquery/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.9 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.8 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-documentai-toolbox + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-documentai-toolbox + +Next Steps +~~~~~~~~~~ + +- Read the `Document AI product documentation`_ to learn + more about the product and see How-to Guides. 
+- Read the `Client Library Documentation`_ for Document AI Toolbox + to see other available methods on the client. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _`Client Library Documentation`: https://cloud.google.com/python/docs/reference/documentai-toolbox/latest +.. _`Document AI documentation`: https://cloud.google.com/document-ai +.. _`Document AI product documentation`: https://cloud.google.com/document-ai/docs/overview +.. _`README`: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-documentai/.repo-metadata.json b/packages/google-cloud-documentai/.repo-metadata.json index d78fa1206877..03ed9ee3dabb 100644 --- a/packages/google-cloud-documentai/.repo-metadata.json +++ b/packages/google-cloud-documentai/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "Service to parse structured information from unstructured or semi-structured documents using state-of-the-art Google AI such as natural language, computer vision, translation, and AutoML.", - "api_id": "documentai.googleapis.com", - "api_shortname": "documentai", - "client_documentation": "https://cloud.google.com/python/docs/reference/documentai/latest", - "default_version": "v1", - "distribution_name": "google-cloud-documentai", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "documentai", - "name_pretty": "Document AI", - "product_documentation": "https://cloud.google.com/document-ai/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "Service to parse structured information from unstructured or semi-structured documents using state-of-the-art Google AI such as natural language, computer vision, translation, and AutoML.", + "api_id": "documentai.googleapis.com", + "api_shortname": "documentai", + "client_documentation": "https://cloud.google.com/python/docs/reference/documentai/latest", + 
"default_version": "v1", + "distribution_name": "google-cloud-documentai", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1132231\u0026template=1639002", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "documentai", + "name_pretty": "Document AI", + "product_documentation": "https://cloud.google.com/document-ai/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-domains/.repo-metadata.json b/packages/google-cloud-domains/.repo-metadata.json index 86c692d1a4e7..500991a186dc 100644 --- a/packages/google-cloud-domains/.repo-metadata.json +++ b/packages/google-cloud-domains/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows you to register and manage domains by using Cloud Domains.", - "api_id": "domains.googleapis.com", - "api_shortname": "domains", - "client_documentation": "https://cloud.google.com/python/docs/reference/domains/latest", - "default_version": "v1", - "distribution_name": "google-cloud-domains", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "domains", - "name_pretty": "Cloud Domains", - "product_documentation": "https://cloud.google.com/domains", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows you to register and manage domains by using Cloud Domains.", + "api_id": "domains.googleapis.com", + "api_shortname": "domains", + "client_documentation": "https://cloud.google.com/python/docs/reference/domains/latest", + "default_version": "v1", + "distribution_name": "google-cloud-domains", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "domains", + "name_pretty": "Cloud Domains", + "product_documentation": "https://cloud.google.com/domains", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git 
a/packages/google-cloud-edgecontainer/.repo-metadata.json b/packages/google-cloud-edgecontainer/.repo-metadata.json index 94a220574f21..3f925408413a 100644 --- a/packages/google-cloud-edgecontainer/.repo-metadata.json +++ b/packages/google-cloud-edgecontainer/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Google Distributed Cloud Edge allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center.", - "api_id": "edgecontainer.googleapis.com", - "api_shortname": "edgecontainer", - "client_documentation": "https://cloud.google.com/python/docs/reference/edgecontainer/latest", - "default_version": "v1", - "distribution_name": "google-cloud-edgecontainer", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "edgecontainer", - "name_pretty": "Distributed Cloud Edge Container", - "product_documentation": "https://cloud.google.com/distributed-cloud/edge", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Google Distributed Cloud Edge allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center.", + "api_id": "edgecontainer.googleapis.com", + "api_shortname": "edgecontainer", + "client_documentation": "https://cloud.google.com/python/docs/reference/edgecontainer/latest", + "default_version": "v1", + "distribution_name": "google-cloud-edgecontainer", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "edgecontainer", + "name_pretty": "Distributed Cloud Edge Container", + "product_documentation": "https://cloud.google.com/distributed-cloud/edge", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-edgenetwork/.repo-metadata.json b/packages/google-cloud-edgenetwork/.repo-metadata.json index 34e17246175a..b2a6938f84b3 
100644 --- a/packages/google-cloud-edgenetwork/.repo-metadata.json +++ b/packages/google-cloud-edgenetwork/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Network management API for Distributed Cloud Edge", - "api_id": "edgenetwork.googleapis.com", - "api_shortname": "edgenetwork", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest", - "default_version": "v1", - "distribution_name": "google-cloud-edgenetwork", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187192&template=1162689", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-edgenetwork", - "name_pretty": "Distributed Cloud Edge Network API", - "product_documentation": "https://cloud.google.com/distributed-cloud/edge/latest/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Network management API for Distributed Cloud Edge", + "api_id": "edgenetwork.googleapis.com", + "api_shortname": "edgenetwork", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-edgenetwork/latest", + "default_version": "v1", + "distribution_name": "google-cloud-edgenetwork", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187192\u0026template=1162689", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-edgenetwork", + "name_pretty": "Distributed Cloud Edge Network API", + "product_documentation": "https://cloud.google.com/distributed-cloud/edge/latest/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-enterpriseknowledgegraph/.repo-metadata.json b/packages/google-cloud-enterpriseknowledgegraph/.repo-metadata.json index 9caaf3815e64..1711207947b0 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/.repo-metadata.json +++ 
b/packages/google-cloud-enterpriseknowledgegraph/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "enterpriseknowledgegraph.googleapis.com", - "api_shortname": "enterpriseknowledgegraph", - "client_documentation": "https://cloud.google.com/python/docs/reference/enterpriseknowledgegraph/latest", - "default_version": "v1", - "distribution_name": "google-cloud-enterpriseknowledgegraph", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "enterpriseknowledgegraph", - "name_pretty": "Enterprise Knowledge Graph", - "product_documentation": "https://cloud.google.com/enterprise-knowledge-graph/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "enterpriseknowledgegraph.googleapis.com", + "api_shortname": "enterpriseknowledgegraph", + "client_documentation": "https://cloud.google.com/python/docs/reference/enterpriseknowledgegraph/latest", + "default_version": "v1", + "distribution_name": "google-cloud-enterpriseknowledgegraph", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "enterpriseknowledgegraph", + "name_pretty": "Enterprise Knowledge Graph", + "product_documentation": "https://cloud.google.com/enterprise-knowledge-graph/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-error-reporting/.repo-metadata.json b/packages/google-cloud-error-reporting/.repo-metadata.json index 05f471c21d8e..48cfd1550f6a 100644 --- a/packages/google-cloud-error-reporting/.repo-metadata.json +++ b/packages/google-cloud-error-reporting/.repo-metadata.json @@ -1,18 +1,16 @@ { - "name": "clouderrorreporting", - "name_pretty": "Error Reporting API", - "product_documentation": "https://cloud.google.com/error-reporting", + "api_description": "counts, analyzes 
and aggregates the crashes in your running cloud services. A centralized error management interface displays the results with sorting and filtering capabilities. A dedicated view shows the error details: time chart, occurrences, affected user count, first and last seen dates and a cleaned exception stack trace. Opt-in to receive email and mobile alerts on new errors.", + "api_id": "clouderrorreporting.googleapis.com", + "api_shortname": "clouderrorreporting", "client_documentation": "https://cloud.google.com/python/docs/reference/clouderrorreporting/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-error-reporting", "issue_tracker": "https://issuetracker.google.com/savedsearches/559780", - "release_level": "preview", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-error-reporting", - "api_id": "clouderrorreporting.googleapis.com", - "requires_billing": false, - "codeowner_team": "@googleapis/yoshi-python", - "default_version": "v1beta1", - "api_shortname": "clouderrorreporting", - "api_description": "counts, analyzes and aggregates the crashes in your running cloud services. A centralized error management interface displays the results with sorting and filtering capabilities. A dedicated view shows the error details: time chart, occurrences, affected user count, first and last seen dates and a cleaned exception stack trace. Opt-in to receive email and mobile alerts on new errors." 
-} + "name": "clouderrorreporting", + "name_pretty": "Error Reporting API", + "product_documentation": "https://cloud.google.com/error-reporting", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-essential-contacts/.repo-metadata.json b/packages/google-cloud-essential-contacts/.repo-metadata.json index 7a9034d0ca09..adb0ff0f1b26 100644 --- a/packages/google-cloud-essential-contacts/.repo-metadata.json +++ b/packages/google-cloud-essential-contacts/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "helps you customize who receives notifications by providing your own list of contacts in many Google Cloud services.", - "api_id": "essentialcontacts.googleapis.com", - "api_shortname": "essentialcontacts", - "client_documentation": "https://cloud.google.com/python/docs/reference/essentialcontacts/latest", - "default_version": "v1", - "distribution_name": "google-cloud-essential-contacts", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "essentialcontacts", - "name_pretty": "Essential Contacts", - "product_documentation": "https://cloud.google.com/resource-manager/docs/managing-notification-contacts/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "helps you customize who receives notifications by providing your own list of contacts in many Google Cloud services.", + "api_id": "essentialcontacts.googleapis.com", + "api_shortname": "essentialcontacts", + "client_documentation": "https://cloud.google.com/python/docs/reference/essentialcontacts/latest", + "default_version": "v1", + "distribution_name": "google-cloud-essential-contacts", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "essentialcontacts", + "name_pretty": "Essential Contacts", + "product_documentation": "https://cloud.google.com/resource-manager/docs/managing-notification-contacts/", + "release_level": 
"stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-eventarc-publishing/.repo-metadata.json b/packages/google-cloud-eventarc-publishing/.repo-metadata.json index ff5952ac78e4..6a11d8944131 100644 --- a/packages/google-cloud-eventarc-publishing/.repo-metadata.json +++ b/packages/google-cloud-eventarc-publishing/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes.", - "api_id": "eventarcpublishing.googleapis.com", - "api_shortname": "eventarcpublishing", - "client_documentation": "https://cloud.google.com/python/docs/reference/eventarcpublishing/latest", - "default_version": "v1", - "distribution_name": "google-cloud-eventarc-publishing", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "eventarcpublishing", - "name_pretty": "Eventarc Publishing", - "product_documentation": "https://cloud.google.com/eventarc/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes.", + "api_id": "eventarcpublishing.googleapis.com", + "api_shortname": "eventarcpublishing", + "client_documentation": "https://cloud.google.com/python/docs/reference/eventarcpublishing/latest", + "default_version": "v1", + "distribution_name": "google-cloud-eventarc-publishing", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "eventarcpublishing", + "name_pretty": "Eventarc Publishing", + "product_documentation": "https://cloud.google.com/eventarc/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-eventarc/.repo-metadata.json 
b/packages/google-cloud-eventarc/.repo-metadata.json index 1832aa7492a4..e7fb0382fa71 100644 --- a/packages/google-cloud-eventarc/.repo-metadata.json +++ b/packages/google-cloud-eventarc/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes. Eventarc requires no infrastructure management, you can optimize productivity and costs while building a modern, event-driven solution.", - "api_id": "eventarc.googleapis.com", - "api_shortname": "eventarc", - "client_documentation": "https://cloud.google.com/python/docs/reference/eventarc/latest", - "default_version": "v1", - "distribution_name": "google-cloud-eventarc", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "eventarc", - "name_pretty": "Eventarc", - "product_documentation": "https://cloud.google.com/eventarc/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes. 
Eventarc requires no infrastructure management, you can optimize productivity and costs while building a modern, event-driven solution.", + "api_id": "eventarc.googleapis.com", + "api_shortname": "eventarc", + "client_documentation": "https://cloud.google.com/python/docs/reference/eventarc/latest", + "default_version": "v1", + "distribution_name": "google-cloud-eventarc", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "eventarc", + "name_pretty": "Eventarc", + "product_documentation": "https://cloud.google.com/eventarc/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-filestore/.repo-metadata.json b/packages/google-cloud-filestore/.repo-metadata.json index 4936058710f1..5650809edf22 100644 --- a/packages/google-cloud-filestore/.repo-metadata.json +++ b/packages/google-cloud-filestore/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Filestore instances are fully managed NFS file servers on Google Cloud for use with applications running on Compute Engine virtual machines (VMs) instances or Google Kubernetes Engine clusters.", - "api_id": "file.googleapis.com", - "api_shortname": "file", - "client_documentation": "https://cloud.google.com/python/docs/reference/file/latest", - "default_version": "v1", - "distribution_name": "google-cloud-filestore", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "file", - "name_pretty": "Filestore", - "product_documentation": "https://cloud.google.com/filestore/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Filestore instances are fully managed NFS file servers on Google Cloud for use with applications running on Compute Engine virtual machines (VMs) instances or Google Kubernetes Engine clusters.", + "api_id": "file.googleapis.com", + "api_shortname": "file", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/file/latest", + "default_version": "v1", + "distribution_name": "google-cloud-filestore", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "file", + "name_pretty": "Filestore", + "product_documentation": "https://cloud.google.com/filestore/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-financialservices/.repo-metadata.json b/packages/google-cloud-financialservices/.repo-metadata.json index ddd951241f59..e8318a54a950 100644 --- a/packages/google-cloud-financialservices/.repo-metadata.json +++ b/packages/google-cloud-financialservices/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Google Cloud's Anti Money Laundering AI (AML AI) product is an API that scores AML risk. Use it to identify more risk, more defensibly, with fewer false positives and reduced time per review.", - "api_id": "financialservices.googleapis.com", - "api_shortname": "financialservices", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-financialservices/latest", - "default_version": "v1", - "distribution_name": "google-cloud-financialservices", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-financialservices", - "name_pretty": "Anti Money Laundering AI API", - "product_documentation": "https://cloud.google.com/financial-services/anti-money-laundering/docs/concepts/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Google Cloud's Anti Money Laundering AI (AML AI) product is an API that scores AML risk. 
Use it to identify more risk, more defensibly, with fewer false positives and reduced time per review.", + "api_id": "financialservices.googleapis.com", + "api_shortname": "financialservices", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-financialservices/latest", + "default_version": "v1", + "distribution_name": "google-cloud-financialservices", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-financialservices", + "name_pretty": "Anti Money Laundering AI API", + "product_documentation": "https://cloud.google.com/financial-services/anti-money-laundering/docs/concepts/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-firestore/.repo-metadata.json b/packages/google-cloud-firestore/.repo-metadata.json index b1c39b9a0b07..858bcca32701 100644 --- a/packages/google-cloud-firestore/.repo-metadata.json +++ b/packages/google-cloud-firestore/.repo-metadata.json @@ -1,18 +1,16 @@ { - "name": "firestore", - "name_pretty": "Cloud Firestore API", - "product_documentation": "https://cloud.google.com/firestore", + "api_description": "is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. 
Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions.", + "api_id": "firestore.googleapis.com", + "api_shortname": "firestore", "client_documentation": "https://cloud.google.com/python/docs/reference/firestore/latest", + "default_version": "v1", + "distribution_name": "google-cloud-firestore", "issue_tracker": "https://issuetracker.google.com/savedsearches/5337669", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-firestore", - "api_id": "firestore.googleapis.com", - "requires_billing": true, - "default_version": "v1", - "codeowner_team": "@googleapis/api-firestore @googleapis/api-firestore-partners", - "api_shortname": "firestore", - "api_description": "is a fully-managed NoSQL document database for mobile, web, and server development from Firebase and Google Cloud Platform. It's backed by a multi-region replicated database that ensures once data is committed, it's durable even in the face of unexpected disasters. Not only that, but despite being a distributed database, it's also strongly consistent and offers seamless integration with other Firebase and Google Cloud Platform products, including Google Cloud Functions." 
-} + "name": "firestore", + "name_pretty": "Cloud Firestore API", + "product_documentation": "https://cloud.google.com/firestore", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-firestore/README.rst b/packages/google-cloud-firestore/README.rst index 5947f7aa97eb..cbb1eaa2650f 100644 --- a/packages/google-cloud-firestore/README.rst +++ b/packages/google-cloud-firestore/README.rst @@ -61,14 +61,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. diff --git a/packages/google-cloud-firestore/docs/README.rst b/packages/google-cloud-firestore/docs/README.rst index 5947f7aa97eb..cbb1eaa2650f 100644 --- a/packages/google-cloud-firestore/docs/README.rst +++ b/packages/google-cloud-firestore/docs/README.rst @@ -61,14 +61,14 @@ Supported Python Versions Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. -Python >= 3.7, including 3.14 +Python >= 3.9, including 3.14 .. _active: https://devguide.python.org/devcycle/#in-development-main-branch .. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6 +Python <= 3.8 If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. 
diff --git a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/field.py b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/field.py index 4ead1b809e2d..044548eda0f4 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/field.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/field.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import proto # type: ignore from google.cloud.firestore_admin_v1.types import index @@ -123,19 +124,34 @@ class TtlConfig(proto.Message): r"""The TTL (time-to-live) configuration for documents that have this ``Field`` set. - Storing a timestamp value into a TTL-enabled field will be treated - as the document's absolute expiration time. For Enterprise edition - databases, the timestamp value may also be stored in an array value - in the TTL-enabled field. + A timestamp stored in a TTL-enabled field will be used to determine + the expiration time of the document. The expiration time is the sum + of the timestamp value and the ``expiration_offset``. - Timestamp values in the past indicate that the document is eligible - for immediate expiration. Using any other data type or leaving the - field absent will disable expiration for the individual document. + For Enterprise edition databases, the timestamp value may + alternatively be stored in an array value in the TTL-enabled field. + + An expiration time in the past indicates that the document is + eligible for immediate expiration. Using any other data type or + leaving the field absent will disable expiration for the individual + document. Attributes: state (google.cloud.firestore_admin_v1.types.Field.TtlConfig.State): Output only. The state of the TTL configuration. + expiration_offset (google.protobuf.duration_pb2.Duration): + Optional. 
The offset, relative to the timestamp value from + the TTL-enabled field, used to determine the document's + expiration time. + + ``expiration_offset.seconds`` must be between 0 and + 2,147,483,647 inclusive. Values more precise than seconds + are rejected. + + If unset, defaults to 0, in which case the expiration time + is the same as the timestamp value from the TTL-enabled + field. """ class State(proto.Enum): @@ -172,6 +188,11 @@ class State(proto.Enum): number=1, enum="Field.TtlConfig.State", ) + expiration_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/operation.py b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/operation.py index 1e14f2fec08f..2592cc043a8e 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/operation.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/operation.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore @@ -210,6 +211,10 @@ class TtlConfigDelta(proto.Message): change_type (google.cloud.firestore_admin_v1.types.FieldOperationMetadata.TtlConfigDelta.ChangeType): Specifies how the TTL configuration is changing. + expiration_offset (google.protobuf.duration_pb2.Duration): + The offset, relative to the timestamp value + in the TTL-enabled field, used determine the + document's expiration time. 
""" class ChangeType(proto.Enum): @@ -233,6 +238,11 @@ class ChangeType(proto.Enum): number=1, enum="FieldOperationMetadata.TtlConfigDelta.ChangeType", ) + expiration_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, diff --git a/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_source.py b/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_source.py index 7075797b3d57..c70159dd163b 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_source.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_source.py @@ -19,7 +19,7 @@ from __future__ import annotations -from typing import Generic, TypeVar, TYPE_CHECKING +from typing import TYPE_CHECKING, Generic, TypeVar from google.cloud.firestore_v1 import pipeline_stages as stages from google.cloud.firestore_v1._helpers import DOCUMENT_PATH_DELIMITER diff --git a/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_stages.py b/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_stages.py index cac9c70d4b99..314b5b21d2b6 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_stages.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_v1/pipeline_stages.py @@ -26,10 +26,10 @@ from google.cloud.firestore_v1._helpers import encode_value from google.cloud.firestore_v1.base_vector_query import DistanceMeasure from google.cloud.firestore_v1.pipeline_expressions import ( + CONSTANT_TYPE, AggregateFunction, AliasedExpression, BooleanExpression, - CONSTANT_TYPE, Expression, Field, Ordering, diff --git a/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 5541cbda77fe..899a0e57d3c9 100644 --- 
a/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -20831,7 +20831,7 @@ def test_update_field_rest_call_success(request_type): "ancestor_field": "ancestor_field_value", "reverting": True, }, - "ttl_config": {"state": 1}, + "ttl_config": {"state": 1, "expiration_offset": {"seconds": 751, "nanos": 543}}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-firestore/tests/unit/v1/test_pipeline_stages.py b/packages/google-cloud-firestore/tests/unit/v1/test_pipeline_stages.py index 65685e6e33d6..4c5a68ce393c 100644 --- a/packages/google-cloud-firestore/tests/unit/v1/test_pipeline_stages.py +++ b/packages/google-cloud-firestore/tests/unit/v1/test_pipeline_stages.py @@ -530,6 +530,7 @@ def test_ctor(self): def test_ctor_extended_types(self): import datetime + from google.cloud.firestore_v1._helpers import GeoPoint from google.cloud.firestore_v1.vector import Vector @@ -567,6 +568,7 @@ def test_repr(self): def test_to_pb_constant_types(self): import datetime + from google.cloud.firestore_v1._helpers import GeoPoint from google.cloud.firestore_v1.vector import Vector diff --git a/packages/google-cloud-functions/.repo-metadata.json b/packages/google-cloud-functions/.repo-metadata.json index f0180cfa6dfe..12d89422976d 100644 --- a/packages/google-cloud-functions/.repo-metadata.json +++ b/packages/google-cloud-functions/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "is a scalable pay as you go Functions-as-a-Service (FaaS) to run your code with zero server management.", - "api_id": "cloudfunctions.googleapis.com", - "api_shortname": "cloudfunctions", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudfunctions/latest", - 
"default_version": "v1", - "distribution_name": "google-cloud-functions", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559729", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudfunctions", - "name_pretty": "Cloud Functions", - "product_documentation": "https://cloud.google.com/functions/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a scalable pay as you go Functions-as-a-Service (FaaS) to run your code with zero server management.", + "api_id": "cloudfunctions.googleapis.com", + "api_shortname": "cloudfunctions", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudfunctions/latest", + "default_version": "v1", + "distribution_name": "google-cloud-functions", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559729", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudfunctions", + "name_pretty": "Cloud Functions", + "product_documentation": "https://cloud.google.com/functions/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json b/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json index c15ed4ae09af..a9263ddf766d 100644 --- a/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json +++ b/packages/google-cloud-gdchardwaremanagement/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Google Distributed Cloud connected allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center.", - "api_id": "gdchardwaremanagement.googleapis.com", - "api_shortname": "gdchardwaremanagement", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest", - "default_version": "v1alpha", - "distribution_name": 
"google-cloud-gdchardwaremanagement", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1563150", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-gdchardwaremanagement", - "name_pretty": "GDC Hardware Management API", - "product_documentation": "https://cloud.google.com/distributed-cloud/edge/latest/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Google Distributed Cloud connected allows you to run Kubernetes clusters on dedicated hardware provided and maintained by Google that is separate from the Google Cloud data center.", + "api_id": "gdchardwaremanagement.googleapis.com", + "api_shortname": "gdchardwaremanagement", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gdchardwaremanagement/latest", + "default_version": "v1alpha", + "distribution_name": "google-cloud-gdchardwaremanagement", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1563150", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-gdchardwaremanagement", + "name_pretty": "GDC Hardware Management API", + "product_documentation": "https://cloud.google.com/distributed-cloud/edge/latest/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-geminidataanalytics/.repo-metadata.json b/packages/google-cloud-geminidataanalytics/.repo-metadata.json index e0ab6e165c37..a984144311f7 100644 --- a/packages/google-cloud-geminidataanalytics/.repo-metadata.json +++ b/packages/google-cloud-geminidataanalytics/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Developers can use the Conversational Analytics API, accessed through geminidataanalytics.googleapis.com, to build an artificial intelligence (AI)-powered chat interface, or data agent, that answers questions about structured data in BigQuery, Looker, and Looker 
Studio using natural language.", - "api_id": "geminidataanalytics.googleapis.com", - "api_shortname": "geminidataanalytics", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-geminidataanalytics/latest", - "default_version": "v1alpha", - "distribution_name": "google-cloud-geminidataanalytics", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-geminidataanalytics", - "name_pretty": "Data Analytics API with Gemini", - "product_documentation": "https://cloud.google.com/gemini/docs/conversational-analytics-api/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Developers can use the Conversational Analytics API, accessed through geminidataanalytics.googleapis.com, to build an artificial intelligence (AI)-powered chat interface, or data agent, that answers questions about structured data in BigQuery, Looker, and Looker Studio using natural language.", + "api_id": "geminidataanalytics.googleapis.com", + "api_shortname": "geminidataanalytics", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-geminidataanalytics/latest", + "default_version": "v1alpha", + "distribution_name": "google-cloud-geminidataanalytics", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-geminidataanalytics", + "name_pretty": "Data Analytics API with Gemini", + "product_documentation": "https://cloud.google.com/gemini/docs/conversational-analytics-api/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gke-backup/.repo-metadata.json b/packages/google-cloud-gke-backup/.repo-metadata.json index 95e0aa6ca751..c22c5757f099 100644 --- 
a/packages/google-cloud-gke-backup/.repo-metadata.json +++ b/packages/google-cloud-gke-backup/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "An API for backing up and restoring workloads in GKE.", - "api_id": "gkebackup.googleapis.com", - "api_shortname": "gkebackup", - "client_documentation": "https://cloud.google.com/python/docs/reference/gkebackup/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gke-backup", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "gkebackup", - "name_pretty": "Backup for GKE", - "product_documentation": "https://cloud.google.com/kubernetes-engine/docs/add-on/backup-for-gke/concepts/backup-for-gke", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "An API for backing up and restoring workloads in GKE.", + "api_id": "gkebackup.googleapis.com", + "api_shortname": "gkebackup", + "client_documentation": "https://cloud.google.com/python/docs/reference/gkebackup/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gke-backup", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1126471", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "gkebackup", + "name_pretty": "Backup for GKE", + "product_documentation": "https://cloud.google.com/kubernetes-engine/docs/add-on/backup-for-gke/concepts/backup-for-gke", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json index ddcb18884b5a..1f69c1a23800 100644 --- a/packages/google-cloud-gke-connect-gateway/.repo-metadata.json +++ b/packages/google-cloud-gke-connect-gateway/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "builds on the power of fleets to let Anthos users connect to and run commands against registered Anthos 
clusters in a simple, consistent, and secured way, whether the clusters are on Google Cloud, other public clouds, or on premises, and makes it easier to automate DevOps processes across all your clusters.", - "api_id": "connectgateway.googleapis.com", - "api_shortname": "connectgateway", - "client_documentation": "https://cloud.google.com/python/docs/reference/connectgateway/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gke-connect-gateway", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "connectgateway", - "name_pretty": "GKE Connect Gateway", - "product_documentation": "https://cloud.google.com/anthos/multicluster-management/gateway", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "builds on the power of fleets to let Anthos users connect to and run commands against registered Anthos clusters in a simple, consistent, and secured way, whether the clusters are on Google Cloud, other public clouds, or on premises, and makes it easier to automate DevOps processes across all your clusters.", + "api_id": "connectgateway.googleapis.com", + "api_shortname": "connectgateway", + "client_documentation": "https://cloud.google.com/python/docs/reference/connectgateway/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gke-connect-gateway", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1618911", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "connectgateway", + "name_pretty": "GKE Connect Gateway", + "product_documentation": "https://cloud.google.com/anthos/multicluster-management/gateway", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gke-hub/.repo-metadata.json b/packages/google-cloud-gke-hub/.repo-metadata.json index 8b21eb773583..2ad7c1da50ca 100644 --- 
a/packages/google-cloud-gke-hub/.repo-metadata.json +++ b/packages/google-cloud-gke-hub/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides a unified way to work with Kubernetes clusters as part of Anthos, extending GKE to work in multiple environments. You have consistent, unified, and secure infrastructure, cluster, and container management, whether you're using Anthos on Google Cloud (with traditional GKE), hybrid cloud, or multiple public clouds.", - "api_id": "gkehub.googleapis.com", - "api_shortname": "gkehub", - "client_documentation": "https://cloud.google.com/python/docs/reference/gkehub/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gke-hub", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "gkehub", - "name_pretty": "GKE Hub", - "product_documentation": "https://cloud.google.com/anthos/gke/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides a unified way to work with Kubernetes clusters as part of Anthos, extending GKE to work in multiple environments. 
You have consistent, unified, and secure infrastructure, cluster, and container management, whether you're using Anthos on Google Cloud (with traditional GKE), hybrid cloud, or multiple public clouds.", + "api_id": "gkehub.googleapis.com", + "api_shortname": "gkehub", + "client_documentation": "https://cloud.google.com/python/docs/reference/gkehub/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gke-hub", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "gkehub", + "name_pretty": "GKE Hub", + "product_documentation": "https://cloud.google.com/anthos/gke/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/types/feature.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/types/feature.py index 2f368458b4cc..6a750557b96f 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/types/feature.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/types/feature.py @@ -17,12 +17,15 @@ from typing import MutableMapping, MutableSequence -from google.cloud.gkehub_v1 import configmanagement_v1 # type: ignore -from google.cloud.gkehub_v1 import multiclusteringress_v1 # type: ignore -from google.cloud.gkehub_v1 import rbacrolebindingactuation_v1 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.gkehub_v1 import ( + configmanagement_v1, # type: ignore + multiclusteringress_v1, # type: ignore + rbacrolebindingactuation_v1, # type: ignore +) + __protobuf__ = proto.module( package="google.cloud.gkehub.v1", manifest={ diff --git a/packages/google-cloud-gke-multicloud/.repo-metadata.json b/packages/google-cloud-gke-multicloud/.repo-metadata.json index 2f8dd33f43c3..c4d1b0bb8ba5 100644 --- a/packages/google-cloud-gke-multicloud/.repo-metadata.json +++ b/packages/google-cloud-gke-multicloud/.repo-metadata.json @@ -1,16 
+1,16 @@ { - "api_description": "An API for provisioning and managing GKE clusters running on AWS and Azure infrastructure through a centralized Google Cloud backed control plane.", - "api_id": "gkemulticloud.googleapis.com", - "api_shortname": "gkemulticloud", - "client_documentation": "https://cloud.google.com/python/docs/reference/gkemulticloud/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gke-multicloud", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "gkemulticloud", - "name_pretty": "Anthos Multicloud", - "product_documentation": "https://cloud.google.com/anthos/clusters/docs/multi-cloud", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "An API for provisioning and managing GKE clusters running on AWS and Azure infrastructure through a centralized Google Cloud backed control plane.", + "api_id": "gkemulticloud.googleapis.com", + "api_shortname": "gkemulticloud", + "client_documentation": "https://cloud.google.com/python/docs/reference/gkemulticloud/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gke-multicloud", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=997904\u0026template=1807166", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "gkemulticloud", + "name_pretty": "Anthos Multicloud", + "product_documentation": "https://cloud.google.com/anthos/clusters/docs/multi-cloud", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-gkerecommender/.repo-metadata.json b/packages/google-cloud-gkerecommender/.repo-metadata.json index 0fe21dead60e..546bf4e0bed2 100644 --- a/packages/google-cloud-gkerecommender/.repo-metadata.json +++ b/packages/google-cloud-gkerecommender/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "GKE Recommender API", - "api_id": "gkerecommender.googleapis.com", - 
"api_shortname": "gkerecommender", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gkerecommender/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gkerecommender", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1790908", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-gkerecommender", - "name_pretty": "GKE Recommender API", - "product_documentation": "https://cloud.google.com/kubernetes-engine/docs/how-to/machine-learning/inference-quickstart", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "GKE Recommender API", + "api_id": "gkerecommender.googleapis.com", + "api_shortname": "gkerecommender", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-gkerecommender/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gkerecommender", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1790908", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-gkerecommender", + "name_pretty": "GKE Recommender API", + "product_documentation": "https://cloud.google.com/kubernetes-engine/docs/how-to/machine-learning/inference-quickstart", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-gsuiteaddons/.repo-metadata.json b/packages/google-cloud-gsuiteaddons/.repo-metadata.json index 8cff68364a9f..b31919e028c2 100644 --- a/packages/google-cloud-gsuiteaddons/.repo-metadata.json +++ b/packages/google-cloud-gsuiteaddons/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Add-ons are customized applications that integrate with Google Workspace applications.", - "api_id": "gsuiteaddons.googleapis.com", - "api_shortname": "gsuiteaddons", - "client_documentation": 
"https://cloud.google.com/python/docs/reference/gsuiteaddons/latest", - "default_version": "v1", - "distribution_name": "google-cloud-gsuiteaddons", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "gsuiteaddons", - "name_pretty": "Google Workspace Add-ons API", - "product_documentation": "https://developers.google.com/workspace/add-ons/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Add-ons are customized applications that integrate with Google Workspace applications.", + "api_id": "gsuiteaddons.googleapis.com", + "api_shortname": "gsuiteaddons", + "client_documentation": "https://cloud.google.com/python/docs/reference/gsuiteaddons/latest", + "default_version": "v1", + "distribution_name": "google-cloud-gsuiteaddons", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "gsuiteaddons", + "name_pretty": "Google Workspace Add-ons API", + "product_documentation": "https://developers.google.com/workspace/add-ons/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-hypercomputecluster/.repo-metadata.json b/packages/google-cloud-hypercomputecluster/.repo-metadata.json index 4b3f215a2ca4..22b52d9691af 100644 --- a/packages/google-cloud-hypercomputecluster/.repo-metadata.json +++ b/packages/google-cloud-hypercomputecluster/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Cluster Director API allows you to deploy, manage, and monitor clusters that run AI, ML, or HPC workloads.", - "api_id": "hypercomputecluster.googleapis.com", - "api_shortname": "hypercomputecluster", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-hypercomputecluster/latest", - "default_version": "v1", - "distribution_name": "google-cloud-hypercomputecluster", - "issue_tracker": 
"https://issuetracker.google.com/issues/new?component=1907878&template=2195617", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-hypercomputecluster", - "name_pretty": "Cluster Director API", - "product_documentation": "https://cloud.google.com/blog/products/compute/managed-slurm-and-other-cluster-director-enhancements", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "The Cluster Director API allows you to deploy, manage, and monitor clusters that run AI, ML, or HPC workloads.", + "api_id": "hypercomputecluster.googleapis.com", + "api_shortname": "hypercomputecluster", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-hypercomputecluster/latest", + "default_version": "v1", + "distribution_name": "google-cloud-hypercomputecluster", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1907878\u0026template=2195617", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-hypercomputecluster", + "name_pretty": "Cluster Director API", + "product_documentation": "https://cloud.google.com/blog/products/compute/managed-slurm-and-other-cluster-director-enhancements", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-iam-logging/.repo-metadata.json b/packages/google-cloud-iam-logging/.repo-metadata.json index 37b7e64b9592..5a1f3b4cda55 100644 --- a/packages/google-cloud-iam-logging/.repo-metadata.json +++ b/packages/google-cloud-iam-logging/.repo-metadata.json @@ -1,14 +1,13 @@ { - "api_id": "", - "client_documentation": "https://cloud.google.com/python/docs/reference/iamlogging/latest", - "default_version": "v1", - "distribution_name": "google-cloud-iam-logging", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "OTHER", - "name": "iamlogging", - 
"name_pretty": "IAM Logging Protos", - "product_documentation": "https://cloud.google.com/iam/docs/audit-logging", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "client_documentation": "https://cloud.google.com/python/docs/reference/iamlogging/latest", + "default_version": "v1", + "distribution_name": "google-cloud-iam-logging", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "OTHER", + "name": "iamlogging", + "name_pretty": "IAM Logging Protos", + "product_documentation": "https://cloud.google.com/iam/docs/audit-logging", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-iam/.repo-metadata.json b/packages/google-cloud-iam/.repo-metadata.json index 314afbbb9125..34310210ce10 100644 --- a/packages/google-cloud-iam/.repo-metadata.json +++ b/packages/google-cloud-iam/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Manages identity and access control for Google Cloud Platform resources, including the creation of service accounts, which you can use to authenticate to Google and make API calls.", - "api_id": "iam.googleapis.com", - "api_shortname": "iamcredentials", - "client_documentation": "https://cloud.google.com/python/docs/reference/iam/latest", - "default_version": "v2", - "distribution_name": "google-cloud-iam", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559761", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "iam", - "name_pretty": "Cloud Identity and Access Management", - "product_documentation": "https://cloud.google.com/iam/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Manages identity and access control for Google Cloud Platform resources, including the creation of service accounts, which you can use to authenticate to Google and make API calls.", + "api_id": 
"iam.googleapis.com", + "api_shortname": "iamcredentials", + "client_documentation": "https://cloud.google.com/python/docs/reference/iam/latest", + "default_version": "v2", + "distribution_name": "google-cloud-iam", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559761", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "iam", + "name_pretty": "Cloud Identity and Access Management", + "product_documentation": "https://cloud.google.com/iam/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-iap/.repo-metadata.json b/packages/google-cloud-iap/.repo-metadata.json index 1d6056c72de6..d98e04c0a549 100644 --- a/packages/google-cloud-iap/.repo-metadata.json +++ b/packages/google-cloud-iap/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Identity-Aware Proxy includes a number of features that can be used to protect access to Google Cloud hosted resources and applications hosted on Google Cloud.", - "api_id": "iap.googleapis.com", - "api_shortname": "iap", - "client_documentation": "https://cloud.google.com/python/docs/reference/iap/latest", - "default_version": "v1", - "distribution_name": "google-cloud-iap", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "iap", - "name_pretty": "Identity-Aware Proxy", - "product_documentation": "https://cloud.google.com/iap", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Identity-Aware Proxy includes a number of features that can be used to protect access to Google Cloud hosted resources and applications hosted on Google Cloud.", + "api_id": "iap.googleapis.com", + "api_shortname": "iap", + "client_documentation": "https://cloud.google.com/python/docs/reference/iap/latest", + "default_version": "v1", + "distribution_name": "google-cloud-iap", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": 
"iap", + "name_pretty": "Identity-Aware Proxy", + "product_documentation": "https://cloud.google.com/iap", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-ids/.repo-metadata.json b/packages/google-cloud-ids/.repo-metadata.json index cabf017000bd..55bd8f09eaf6 100644 --- a/packages/google-cloud-ids/.repo-metadata.json +++ b/packages/google-cloud-ids/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Cloud IDS is an intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks on your network. Cloud IDS works by creating a Google-managed peered network with mirrored VMs. Traffic in the peered network is mirrored, and then inspected by Palo Alto Networks threat protection technologies to provide advanced threat detection.", - "api_id": "ids.googleapis.com", - "api_shortname": "ids", - "client_documentation": "https://cloud.google.com/python/docs/reference/ids/latest", - "default_version": "v1", - "distribution_name": "google-cloud-ids", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "ids", - "name_pretty": "Cloud IDS", - "product_documentation": "https://cloud.google.com/intrusion-detection-system/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud IDS is an intrusion detection service that provides threat detection for intrusions, malware, spyware, and command-and-control attacks on your network. Cloud IDS works by creating a Google-managed peered network with mirrored VMs. 
Traffic in the peered network is mirrored, and then inspected by Palo Alto Networks threat protection technologies to provide advanced threat detection.", + "api_id": "ids.googleapis.com", + "api_shortname": "ids", + "client_documentation": "https://cloud.google.com/python/docs/reference/ids/latest", + "default_version": "v1", + "distribution_name": "google-cloud-ids", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "ids", + "name_pretty": "Cloud IDS", + "product_documentation": "https://cloud.google.com/intrusion-detection-system/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-kms-inventory/.repo-metadata.json b/packages/google-cloud-kms-inventory/.repo-metadata.json index 452f9c8907df..d248429a1498 100644 --- a/packages/google-cloud-kms-inventory/.repo-metadata.json +++ b/packages/google-cloud-kms-inventory/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "KMS Inventory API", - "api_id": "inventory.googleapis.com", - "api_shortname": "inventory", - "client_documentation": "https://cloud.google.com/python/docs/reference/inventory/latest", - "default_version": "v1", - "distribution_name": "google-cloud-kms-inventory", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=190860&template=819701", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "inventory", - "name_pretty": "KMS Inventory API", - "product_documentation": "https://cloud.google.com/kms/docs/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "KMS Inventory API", + "api_id": "inventory.googleapis.com", + "api_shortname": "inventory", + "client_documentation": "https://cloud.google.com/python/docs/reference/inventory/latest", + "default_version": "v1", + "distribution_name": "google-cloud-kms-inventory", + "issue_tracker": 
"https://issuetracker.google.com/issues/new?component=190860\u0026template=819701", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "inventory", + "name_pretty": "KMS Inventory API", + "product_documentation": "https://cloud.google.com/kms/docs/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-kms/.repo-metadata.json b/packages/google-cloud-kms/.repo-metadata.json index 24ec09abccf7..e876fb246a37 100644 --- a/packages/google-cloud-kms/.repo-metadata.json +++ b/packages/google-cloud-kms/.repo-metadata.json @@ -1,29 +1,16 @@ { - "api_description": "a cloud-hosted key management service that lets you manage cryptographic keys for your cloud services the same way you do on-premises. You can generate, use, rotate, and destroy AES256, RSA 2048, RSA 3072, RSA 4096, EC P256, and EC P384 cryptographic keys. Cloud KMS is integrated with Cloud IAM and Cloud Audit Logging so that you can manage permissions on individual keys and monitor how these are used. Use Cloud KMS to protect secrets and other sensitive data that you need to store in Google Cloud Platform.", - "api_id": "cloudkms.googleapis.com", - "api_shortname": "cloudkms", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudkms/latest", - "client_library": true, - "custom_content": "The Google Cloud KMS API is a service that allows you to keep encryption keys centrally in the cloud, for direct use by cloud services. 
More info about Cloud KMS can be found at https://cloud.google.com/kms/docs/", - "default_version": "v1", - "distribution_name": "google-cloud-kms", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5264932", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudkms", - "name_pretty": "Google Cloud Key Management Service", - "product_documentation": "https://cloud.google.com/kms", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true, - "sample_project_dir": "samples/snippets/", - "samples": [ - { - "custom_content": "More information about the Cloud KMS quickstart is available at https://cloud.google.com/kms/docs/quickstart", - "description": "This quickstart shows you how to create and use encryption keys with Cloud Key Management Service.", - "file": "quickstart.py", - "name": "Quickstart", - "runnable": true - } - ] + "api_description": "a cloud-hosted key management service that lets you manage cryptographic keys for your cloud services the same way you do on-premises. You can generate, use, rotate, and destroy AES256, RSA 2048, RSA 3072, RSA 4096, EC P256, and EC P384 cryptographic keys. Cloud KMS is integrated with Cloud IAM and Cloud Audit Logging so that you can manage permissions on individual keys and monitor how these are used. 
Use Cloud KMS to protect secrets and other sensitive data that you need to store in Google Cloud Platform.", + "api_id": "cloudkms.googleapis.com", + "api_shortname": "cloudkms", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudkms/latest", + "default_version": "v1", + "distribution_name": "google-cloud-kms", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5264932", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudkms", + "name_pretty": "Google Cloud Key Management Service", + "product_documentation": "https://cloud.google.com/kms", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-language/.repo-metadata.json b/packages/google-cloud-language/.repo-metadata.json index 2ef15d065b6d..d2552cc529c8 100644 --- a/packages/google-cloud-language/.repo-metadata.json +++ b/packages/google-cloud-language/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "provides natural language understanding technologies to developers, including sentiment analysis, entity analysis, entity sentiment analysis, content classification, and syntax analysis. 
This API is part of the larger Cloud Machine Learning API family.", - "api_id": "language.googleapis.com", - "api_shortname": "language", - "client_documentation": "https://cloud.google.com/python/docs/reference/language/latest", - "default_version": "v1", - "distribution_name": "google-cloud-language", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559753", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "language", - "name_pretty": "Natural Language", - "product_documentation": "https://cloud.google.com/natural-language/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "provides natural language understanding technologies to developers, including sentiment analysis, entity analysis, entity sentiment analysis, content classification, and syntax analysis. This API is part of the larger Cloud Machine Learning API family.", + "api_id": "language.googleapis.com", + "api_shortname": "language", + "client_documentation": "https://cloud.google.com/python/docs/reference/language/latest", + "default_version": "v1", + "distribution_name": "google-cloud-language", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559753", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "language", + "name_pretty": "Natural Language", + "product_documentation": "https://cloud.google.com/natural-language/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-licensemanager/.repo-metadata.json b/packages/google-cloud-licensemanager/.repo-metadata.json index 57501835e5aa..3060d14212a1 100644 --- a/packages/google-cloud-licensemanager/.repo-metadata.json +++ b/packages/google-cloud-licensemanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "License Manager is a tool to manage and track third-party licenses on Google Cloud. 
", - "api_id": "licensemanager.googleapis.com", - "api_shortname": "licensemanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-licensemanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-licensemanager", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1659587", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-licensemanager", - "name_pretty": "License Manager API", - "product_documentation": "https://cloud.google.com/compute/docs/instances/windows/ms-licensing", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "License Manager is a tool to manage and track third-party licenses on Google Cloud. ", + "api_id": "licensemanager.googleapis.com", + "api_shortname": "licensemanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-licensemanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-licensemanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1659587", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-licensemanager", + "name_pretty": "License Manager API", + "product_documentation": "https://cloud.google.com/compute/docs/instances/windows/ms-licensing", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-life-sciences/.repo-metadata.json b/packages/google-cloud-life-sciences/.repo-metadata.json index 67dfb437ab39..4f6c76beda34 100644 --- a/packages/google-cloud-life-sciences/.repo-metadata.json +++ b/packages/google-cloud-life-sciences/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a suite of services and tools for managing, processing, and transforming life sciences data.", - "api_id": "lifesciences.googleapis.com", - "api_shortname": "lifesciences", - 
"client_documentation": "https://cloud.google.com/python/docs/reference/lifesciences/latest", - "default_version": "v2beta", - "distribution_name": "google-cloud-life-sciences", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "lifesciences", - "name_pretty": "Cloud Life Sciences", - "product_documentation": "https://cloud.google.com/life-sciences/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "is a suite of services and tools for managing, processing, and transforming life sciences data.", + "api_id": "lifesciences.googleapis.com", + "api_shortname": "lifesciences", + "client_documentation": "https://cloud.google.com/python/docs/reference/lifesciences/latest", + "default_version": "v2beta", + "distribution_name": "google-cloud-life-sciences", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "lifesciences", + "name_pretty": "Cloud Life Sciences", + "product_documentation": "https://cloud.google.com/life-sciences/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-locationfinder/.repo-metadata.json b/packages/google-cloud-locationfinder/.repo-metadata.json index 11b80ea595d9..6828855836d7 100644 --- a/packages/google-cloud-locationfinder/.repo-metadata.json +++ b/packages/google-cloud-locationfinder/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Cloud Location Finder lets you identify and filter cloud locations in regions and zones across Google Cloud, Google Distributed Cloud, Microsoft Azure, Amazon Web Services, and Oracle Cloud Infrastructure based on proximity, geographic location, and carbon footprint.", - "api_id": "locationfinder.googleapis.com", - "api_shortname": "locationfinder", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-locationfinder/latest", - "default_version": "v1", - "distribution_name": 
"google-cloud-locationfinder", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1569265&template=1988535", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-locationfinder", - "name_pretty": "Cloud Location Finder API", - "product_documentation": "https://issuetracker.google.com/issues/new?component=1569265&template=1988535", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud Location Finder lets you identify and filter cloud locations in regions and zones across Google Cloud, Google Distributed Cloud, Microsoft Azure, Amazon Web Services, and Oracle Cloud Infrastructure based on proximity, geographic location, and carbon footprint.", + "api_id": "locationfinder.googleapis.com", + "api_shortname": "locationfinder", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-locationfinder/latest", + "default_version": "v1", + "distribution_name": "google-cloud-locationfinder", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1569265\u0026template=1988535", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-locationfinder", + "name_pretty": "Cloud Location Finder API", + "product_documentation": "https://issuetracker.google.com/issues/new?component=1569265\u0026template=1988535", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-logging/.repo-metadata.json b/packages/google-cloud-logging/.repo-metadata.json index 30c29de3e603..c83bf03eb53b 100644 --- a/packages/google-cloud-logging/.repo-metadata.json +++ b/packages/google-cloud-logging/.repo-metadata.json @@ -1,17 +1,16 @@ { - "name": "logging", - "name_pretty": "Cloud Logging API", - "product_documentation": "https://cloud.google.com/logging/docs", + "api_description": "Writes log entries and manages your Cloud Logging configuration.", + "api_id": 
"logging.googleapis.com", + "api_shortname": "logging", "client_documentation": "https://cloud.google.com/python/docs/reference/logging/latest", + "default_version": "v2", + "distribution_name": "google-cloud-logging", "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-logging", - "api_id": "logging.googleapis.com", - "codeowner_team": "@googleapis/yoshi-python", - "default_version": "v2", - "api_shortname": "logging", - "api_description": "Writes log entries and manages your Cloud Logging configuration." -} + "name": "logging", + "name_pretty": "Cloud Logging API", + "product_documentation": "https://cloud.google.com/logging/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-lustre/.repo-metadata.json b/packages/google-cloud-lustre/.repo-metadata.json index f767c608557b..f553afd50dfe 100644 --- a/packages/google-cloud-lustre/.repo-metadata.json +++ b/packages/google-cloud-lustre/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "null ", - "api_id": "lustre.googleapis.com", - "api_shortname": "lustre", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-lustre/latest", - "default_version": "v1", - "distribution_name": "google-cloud-lustre", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1625664", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-lustre", - "name_pretty": "Google Cloud Managed Lustre API", - "product_documentation": "https://cloud.google.com/managed-lustre/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "null ", + "api_id": "lustre.googleapis.com", + "api_shortname": "lustre", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/google-cloud-lustre/latest", + "default_version": "v1", + "distribution_name": "google-cloud-lustre", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1625664", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-lustre", + "name_pretty": "Google Cloud Managed Lustre API", + "product_documentation": "https://cloud.google.com/managed-lustre/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-maintenance-api/.repo-metadata.json b/packages/google-cloud-maintenance-api/.repo-metadata.json index c69e26982d60..0581237885fc 100644 --- a/packages/google-cloud-maintenance-api/.repo-metadata.json +++ b/packages/google-cloud-maintenance-api/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Maintenance API provides a centralized view of planned disruptive maintenance events across supported Google Cloud products. 
It offers users visibility into upcoming, ongoing, and completed maintenance, along with controls to manage certain maintenance activities, such as mainteance windows, rescheduling, and on-demand updates.", - "api_id": "api.googleapis.com", - "api_shortname": "api", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-maintenance-api/latest", - "default_version": "v1", - "distribution_name": "google-cloud-maintenance-api", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1673988&template=1161103", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-maintenance-api", - "name_pretty": "Maintenance API", - "product_documentation": "https://cloud.google.com/unified-maintenance/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Maintenance API provides a centralized view of planned disruptive maintenance events across supported Google Cloud products. 
It offers users visibility into upcoming, ongoing, and completed maintenance, along with controls to manage certain maintenance activities, such as maintenance windows, rescheduling, and on-demand updates.", + "api_id": "api.googleapis.com", + "api_shortname": "api", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-maintenance-api/latest", + "default_version": "v1", + "distribution_name": "google-cloud-maintenance-api", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1673988\u0026template=1161103", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-maintenance-api", + "name_pretty": "Maintenance API", + "product_documentation": "https://cloud.google.com/unified-maintenance/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-managed-identities/.repo-metadata.json b/packages/google-cloud-managed-identities/.repo-metadata.json index ddeefc001676..106fbc94ea1c 100644 --- a/packages/google-cloud-managed-identities/.repo-metadata.json +++ b/packages/google-cloud-managed-identities/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a highly available, hardened Google Cloud service running actual Microsoft AD that enables you to manage authentication and authorization for your AD-dependent workloads, automate AD server maintenance and security configuration, and connect your on-premises AD domain to the cloud.", - "api_id": "managedidentities.googleapis.com", - "api_shortname": "managedidentities", - "client_documentation": "https://cloud.google.com/python/docs/reference/managedidentities/latest", - "default_version": "v1", - "distribution_name": "google-cloud-managed-identities", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "managedidentities", - "name_pretty": "Managed Service for Microsoft Active Directory", - 
"product_documentation": "https://cloud.google.com/managed-microsoft-ad/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a highly available, hardened Google Cloud service running actual Microsoft AD that enables you to manage authentication and authorization for your AD-dependent workloads, automate AD server maintenance and security configuration, and connect your on-premises AD domain to the cloud.", + "api_id": "managedidentities.googleapis.com", + "api_shortname": "managedidentities", + "client_documentation": "https://cloud.google.com/python/docs/reference/managedidentities/latest", + "default_version": "v1", + "distribution_name": "google-cloud-managed-identities", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "managedidentities", + "name_pretty": "Managed Service for Microsoft Active Directory", + "product_documentation": "https://cloud.google.com/managed-microsoft-ad/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-managedkafka-schemaregistry/.repo-metadata.json b/packages/google-cloud-managedkafka-schemaregistry/.repo-metadata.json index c41840de85b3..0ecbb22dae8d 100644 --- a/packages/google-cloud-managedkafka-schemaregistry/.repo-metadata.json +++ b/packages/google-cloud-managedkafka-schemaregistry/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Manage Apache Kafka clusters and resources. 
", - "api_id": "schemaregistry.googleapis.com", - "api_shortname": "schemaregistry", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka-schemaregistry/latest", - "default_version": "v1", - "distribution_name": "google-cloud-managedkafka-schemaregistry", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-managedkafka-schemaregistry", - "name_pretty": "Managed Service for Apache Kafka API", - "product_documentation": "https://cloud.google.com/managed-service-for-apache-kafka/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Manage Apache Kafka clusters and resources. ", + "api_id": "schemaregistry.googleapis.com", + "api_shortname": "schemaregistry", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka-schemaregistry/latest", + "default_version": "v1", + "distribution_name": "google-cloud-managedkafka-schemaregistry", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-managedkafka-schemaregistry", + "name_pretty": "Managed Service for Apache Kafka API", + "product_documentation": "https://cloud.google.com/managed-service-for-apache-kafka/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-managedkafka/.repo-metadata.json b/packages/google-cloud-managedkafka/.repo-metadata.json index 78b9f0d5d871..d6e38bca8aab 100644 --- a/packages/google-cloud-managedkafka/.repo-metadata.json +++ b/packages/google-cloud-managedkafka/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Managed Service for Apache Kafka API is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud.", - 
"api_id": "managedkafka.googleapis.com", - "api_shortname": "managedkafka", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest", - "default_version": "v1", - "distribution_name": "google-cloud-managedkafka", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-managedkafka", - "name_pretty": "Managed Service for Apache Kafka", - "product_documentation": "https://cloud.google.com/managed-kafka", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Managed Service for Apache Kafka API is a managed cloud service that lets you ingest Kafka streams directly into Google Cloud.", + "api_id": "managedkafka.googleapis.com", + "api_shortname": "managedkafka", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-managedkafka/latest", + "default_version": "v1", + "distribution_name": "google-cloud-managedkafka", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1376234", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-managedkafka", + "name_pretty": "Managed Service for Apache Kafka", + "product_documentation": "https://cloud.google.com/managed-kafka", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-media-translation/.repo-metadata.json b/packages/google-cloud-media-translation/.repo-metadata.json index a3ad7f564055..4ba67826424e 100644 --- a/packages/google-cloud-media-translation/.repo-metadata.json +++ b/packages/google-cloud-media-translation/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides enterprise quality translation from/to various media types.", - "api_id": "mediatranslation.googleapis.com", - "api_shortname": "mediatranslation", - "client_documentation": 
"https://cloud.google.com/python/docs/reference/mediatranslation/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-media-translation", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "mediatranslation", - "name_pretty": "Media Translation", - "product_documentation": "https://cloud.google.com/media-translation", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "provides enterprise quality translation from/to various media types.", + "api_id": "mediatranslation.googleapis.com", + "api_shortname": "mediatranslation", + "client_documentation": "https://cloud.google.com/python/docs/reference/mediatranslation/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-media-translation", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "mediatranslation", + "name_pretty": "Media Translation", + "product_documentation": "https://cloud.google.com/media-translation", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-memcache/.repo-metadata.json b/packages/google-cloud-memcache/.repo-metadata.json index a58c2b9915ed..b61bbfda4182 100644 --- a/packages/google-cloud-memcache/.repo-metadata.json +++ b/packages/google-cloud-memcache/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a fully-managed in-memory data store service for Memcache.", - "api_id": "memcache.googleapis.com", - "api_shortname": "memcache", - "client_documentation": "https://cloud.google.com/python/docs/reference/memcache/latest", - "default_version": "v1", - "distribution_name": "google-cloud-memcache", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "memcache", - "name_pretty": "Cloud Memorystore for Memcached", - "product_documentation": "https://cloud.google.com/memorystore/docs/memcached/", - "release_level": 
"stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a fully-managed in-memory data store service for Memcache.", + "api_id": "memcache.googleapis.com", + "api_shortname": "memcache", + "client_documentation": "https://cloud.google.com/python/docs/reference/memcache/latest", + "default_version": "v1", + "distribution_name": "google-cloud-memcache", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "memcache", + "name_pretty": "Cloud Memorystore for Memcached", + "product_documentation": "https://cloud.google.com/memorystore/docs/memcached/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-memorystore/.repo-metadata.json b/packages/google-cloud-memorystore/.repo-metadata.json index 759d79a0fa44..2e3e110e0ae7 100644 --- a/packages/google-cloud-memorystore/.repo-metadata.json +++ b/packages/google-cloud-memorystore/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Memorystore for Valkey is a fully managed Valkey Cluster service for Google Cloud. 
Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Valkey service without the burden of managing complex Valkey deployments.", - "api_id": "memorystore.googleapis.com", - "api_shortname": "memorystore", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest", - "default_version": "v1", - "distribution_name": "google-cloud-memorystore", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-memorystore", - "name_pretty": "Memorystore", - "product_documentation": "https://cloud.google.com/memorystore/docs/valkey", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Memorystore for Valkey is a fully managed Valkey Cluster service for Google Cloud. Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Valkey service without the burden of managing complex Valkey deployments.", + "api_id": "memorystore.googleapis.com", + "api_shortname": "memorystore", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest", + "default_version": "v1", + "distribution_name": "google-cloud-memorystore", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-memorystore", + "name_pretty": "Memorystore", + "product_documentation": "https://cloud.google.com/memorystore/docs/valkey", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py index 97deabfae555..2afc14a00ea5 100644 --- 
a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py @@ -38,6 +38,7 @@ DiscoveryEndpoint, GetCertificateAuthorityRequest, GetInstanceRequest, + GetSharedRegionalCertificateAuthorityRequest, Instance, ListInstancesRequest, ListInstancesResponse, @@ -47,6 +48,7 @@ PscAutoConnection, PscConnection, PscConnectionStatus, + SharedRegionalCertificateAuthority, UpdateInstanceRequest, ZoneDistributionConfig, ) @@ -153,6 +155,7 @@ def _get_version(dependency_name): "DiscoveryEndpoint", "GetCertificateAuthorityRequest", "GetInstanceRequest", + "GetSharedRegionalCertificateAuthorityRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", @@ -163,6 +166,7 @@ def _get_version(dependency_name): "PscAutoConnection", "PscConnection", "PscConnectionStatus", + "SharedRegionalCertificateAuthority", "UpdateInstanceRequest", "ZoneDistributionConfig", ) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json index 0951913c6fff..084be27f2365 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json @@ -30,6 +30,11 @@ "get_instance" ] }, + "GetSharedRegionalCertificateAuthority": { + "methods": [ + "get_shared_regional_certificate_authority" + ] + }, "ListInstances": { "methods": [ "list_instances" diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py index c6088b577e7d..f9498342827e 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py +++ 
b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py @@ -230,6 +230,28 @@ def transport(self) -> MemorystoreTransport: """ return self._transport + @staticmethod + def ca_pool_path( + project: str, + location: str, + ca_pool: str, + ) -> str: + """Returns a fully-qualified ca_pool string.""" + return "projects/{project}/locations/{location}/caPools/{ca_pool}".format( + project=project, + location=location, + ca_pool=ca_pool, + ) + + @staticmethod + def parse_ca_pool_path(path: str) -> Dict[str, str]: + """Parses a ca_pool path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/caPools/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def certificate_authority_path( project: str, @@ -337,6 +359,26 @@ def parse_service_attachment_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def shared_regional_certificate_authority_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified shared_regional_certificate_authority string.""" + return "projects/{project}/locations/{location}/sharedRegionalCertificateAuthority".format( + project=project, + location=location, + ) + + @staticmethod + def parse_shared_regional_certificate_authority_path(path: str) -> Dict[str, str]: + """Parses a shared_regional_certificate_authority path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/sharedRegionalCertificateAuthority$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -1582,6 +1624,123 @@ def sample_get_certificate_authority(): # Done; return the response. 
return response + def get_shared_regional_certificate_authority( + self, + request: Optional[ + Union[memorystore.GetSharedRegionalCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.SharedRegionalCertificateAuthority: + r"""Gets the details of shared regional certificate + authority information for Memorystore instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_get_shared_regional_certificate_authority(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetSharedRegionalCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_shared_regional_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.GetSharedRegionalCertificateAuthorityRequest, dict]): + The request object. Request for + [GetSharedRegionalCertificateAuthority][google.cloud.memorystore.v1beta.Memorystore.GetSharedRegionalCertificateAuthority]. + name (str): + Required. Regional certificate authority resource name + using the form: + ``projects/{project}/locations/{location}/sharedRegionalCertificateAuthority`` + where ``location_id`` refers to a Google Cloud region. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.memorystore_v1beta.types.SharedRegionalCertificateAuthority: + Shared regional certificate authority + for an instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, memorystore.GetSharedRegionalCertificateAuthorityRequest + ): + request = memorystore.GetSharedRegionalCertificateAuthorityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_shared_regional_certificate_authority + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "MemorystoreClient": return self diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py index 4f568a208d4e..2240c070c88d 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py @@ -201,6 +201,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_shared_regional_certificate_authority: gapic_v1.method.wrap_method( + self.get_shared_regional_certificate_authority, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -307,6 +312,18 @@ def get_certificate_authority( ]: raise NotImplementedError() + @property + def get_shared_regional_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetSharedRegionalCertificateAuthorityRequest], + Union[ + memorystore.SharedRegionalCertificateAuthority, + Awaitable[memorystore.SharedRegionalCertificateAuthority], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py index 2732c4a9eec0..8a4dc06dbddb 100644 --- 
a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py @@ -106,6 +106,14 @@ def post_get_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_get_shared_regional_certificate_authority(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_shared_regional_certificate_authority(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_instances(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -319,6 +327,58 @@ def post_get_instance_with_metadata( """ return response, metadata + def pre_get_shared_regional_certificate_authority( + self, + request: memorystore.GetSharedRegionalCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.GetSharedRegionalCertificateAuthorityRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_shared_regional_certificate_authority + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_shared_regional_certificate_authority( + self, response: memorystore.SharedRegionalCertificateAuthority + ) -> memorystore.SharedRegionalCertificateAuthority: + """Post-rpc interceptor for get_shared_regional_certificate_authority + + DEPRECATED. Please use the `post_get_shared_regional_certificate_authority_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. 
This `post_get_shared_regional_certificate_authority` interceptor runs + before the `post_get_shared_regional_certificate_authority_with_metadata` interceptor. + """ + return response + + def post_get_shared_regional_certificate_authority_with_metadata( + self, + response: memorystore.SharedRegionalCertificateAuthority, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.SharedRegionalCertificateAuthority, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_shared_regional_certificate_authority + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_shared_regional_certificate_authority_with_metadata` + interceptor in new development instead of the `post_get_shared_regional_certificate_authority` interceptor. + When both interceptors are used, this `post_get_shared_regional_certificate_authority_with_metadata` interceptor runs after the + `post_get_shared_regional_certificate_authority` interceptor. The (possibly modified) response returned by + `post_get_shared_regional_certificate_authority` will be passed to + `post_get_shared_regional_certificate_authority_with_metadata`. 
+ """ + return response, metadata + def pre_list_instances( self, request: memorystore.ListInstancesRequest, @@ -1302,6 +1362,165 @@ def __call__( ) return resp + class _GetSharedRegionalCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetSharedRegionalCertificateAuthority, + MemorystoreRestStub, + ): + def __hash__(self): + return hash( + "MemorystoreRestTransport.GetSharedRegionalCertificateAuthority" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.GetSharedRegionalCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.SharedRegionalCertificateAuthority: + r"""Call the get shared regional + certificate authority method over HTTP. + + Args: + request (~.memorystore.GetSharedRegionalCertificateAuthorityRequest): + The request object. Request for + [GetSharedRegionalCertificateAuthority][google.cloud.memorystore.v1beta.Memorystore.GetSharedRegionalCertificateAuthority]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.memorystore.SharedRegionalCertificateAuthority: + Shared regional certificate authority + for an instance. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetSharedRegionalCertificateAuthority._get_http_options() + + request, metadata = ( + self._interceptor.pre_get_shared_regional_certificate_authority( + request, metadata + ) + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetSharedRegionalCertificateAuthority._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetSharedRegionalCertificateAuthority._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1beta.MemorystoreClient.GetSharedRegionalCertificateAuthority", + extra={ + "serviceName": "google.cloud.memorystore.v1beta.Memorystore", + "rpcName": "GetSharedRegionalCertificateAuthority", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._GetSharedRegionalCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.SharedRegionalCertificateAuthority() + pb_resp = memorystore.SharedRegionalCertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_shared_regional_certificate_authority( + resp + ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = ( + self._interceptor.post_get_shared_regional_certificate_authority_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + memorystore.SharedRegionalCertificateAuthority.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1beta.MemorystoreClient.get_shared_regional_certificate_authority", + extra={ + "serviceName": "google.cloud.memorystore.v1beta.Memorystore", + "rpcName": "GetSharedRegionalCertificateAuthority", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListInstances( _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub ): @@ -1637,6 +1856,19 @@ def get_instance( # In C++ this would require a dynamic_cast return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def get_shared_regional_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetSharedRegionalCertificateAuthorityRequest], + memorystore.SharedRegionalCertificateAuthority, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSharedRegionalCertificateAuthority( + self._session, self._host, self._interceptor + ) # type: ignore + @property def list_instances( self, diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py index 8cd96582440d..2d12561aeaa0 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py @@ -289,6 +289,55 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetSharedRegionalCertificateAuthority: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/sharedRegionalCertificateAuthority}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetSharedRegionalCertificateAuthorityRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseMemorystoreRestTransport._BaseGetSharedRegionalCertificateAuthority._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py index 412ec4452e77..9b9ff31bc2d5 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py @@ -21,6 +21,7 @@ DiscoveryEndpoint, GetCertificateAuthorityRequest, GetInstanceRequest, + GetSharedRegionalCertificateAuthorityRequest, Instance, ListInstancesRequest, ListInstancesResponse, @@ -30,6 +31,7 @@ PscAutoConnection, PscConnection, PscConnectionStatus, + SharedRegionalCertificateAuthority, UpdateInstanceRequest, ZoneDistributionConfig, ) @@ -41,6 +43,7 @@ "DiscoveryEndpoint", "GetCertificateAuthorityRequest", "GetInstanceRequest", + "GetSharedRegionalCertificateAuthorityRequest", "Instance", "ListInstancesRequest", "ListInstancesResponse", @@ -49,6 +52,7 @@ "PersistenceConfig", "PscAutoConnection", "PscConnection", + "SharedRegionalCertificateAuthority", "UpdateInstanceRequest", "ZoneDistributionConfig", "ConnectionType", diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py index 1ea094b0a8d8..5773aba0a9ff 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py @@ -41,6 +41,8 @@ "DeleteInstanceRequest", "GetCertificateAuthorityRequest", "CertificateAuthority", + 
"SharedRegionalCertificateAuthority", + "GetSharedRegionalCertificateAuthorityRequest", "OperationMetadata", }, ) @@ -161,6 +163,23 @@ class Instance(proto.Message): Optional. Endpoints for the instance. mode (google.cloud.memorystore_v1beta.types.Instance.Mode): Optional. The mode config for the instance. + server_ca_mode (google.cloud.memorystore_v1beta.types.Instance.ServerCaMode): + Optional. Immutable. The Server CA mode for + the instance. + + This field is a member of `oneof`_ ``_server_ca_mode``. + server_ca_pool (str): + Optional. Immutable. The customer-managed CA pool for the + instance. Only applicable if the Server CA mode is + CUSTOMER_MANAGED_CAS_CA. Format: + "projects/{project}/locations/{region}/caPools/{ca_pool}". + + This field is a member of `oneof`_ ``_server_ca_pool``. + rotate_server_certificate (bool): + Optional. Input only. Rotate the server + certificates. + + This field is a member of `oneof`_ ``_rotate_server_certificate``. """ class State(proto.Enum): @@ -262,6 +281,37 @@ class Mode(proto.Enum): CLUSTER = 2 CLUSTER_DISABLED = 4 + class ServerCaMode(proto.Enum): + r"""The Server CA mode for the instance. + + Values: + SERVER_CA_MODE_UNSPECIFIED (0): + Server CA mode not specified. + GOOGLE_MANAGED_PER_INSTANCE_CA (1): + Each instance has its own Google-managed CA. + GOOGLE_MANAGED_SHARED_CA (2): + The instance uses a Google-managed shared CA + for the instance's region. + CUSTOMER_MANAGED_CAS_CA (3): + The instance uses a customer-managed CA from + CAS. + SERVER_CA_MODE_GOOGLE_MANAGED_PER_INSTANCE_CA (1): + Deprecated: Use GOOGLE_MANAGED_PER_INSTANCE_CA instead. + SERVER_CA_MODE_GOOGLE_MANAGED_SHARED_CA (2): + Deprecated: Use GOOGLE_MANAGED_SHARED_CA instead. + SERVER_CA_MODE_CUSTOMER_MANAGED_CAS_CA (3): + Deprecated: Use CUSTOMER_MANAGED_CAS_CA instead. 
+ """ + + _pb_options = {"allow_alias": True} + SERVER_CA_MODE_UNSPECIFIED = 0 + GOOGLE_MANAGED_PER_INSTANCE_CA = 1 + GOOGLE_MANAGED_SHARED_CA = 2 + CUSTOMER_MANAGED_CAS_CA = 3 + SERVER_CA_MODE_GOOGLE_MANAGED_PER_INSTANCE_CA = 1 + SERVER_CA_MODE_GOOGLE_MANAGED_SHARED_CA = 2 + SERVER_CA_MODE_CUSTOMER_MANAGED_CAS_CA = 3 + class StateInfo(proto.Message): r"""Additional information about the state of the instance. @@ -473,6 +523,22 @@ class ConnectionDetail(proto.Message): number=26, enum=Mode, ) + server_ca_mode: ServerCaMode = proto.Field( + proto.ENUM, + number=56, + optional=True, + enum=ServerCaMode, + ) + server_ca_pool: str = proto.Field( + proto.STRING, + number=57, + optional=True, + ) + rotate_server_certificate: bool = proto.Field( + proto.BOOL, + number=58, + optional=True, + ) class PscAutoConnection(proto.Message): @@ -1179,6 +1245,86 @@ class CertChain(proto.Message): ) +class SharedRegionalCertificateAuthority(proto.Message): + r"""Shared regional certificate authority for an instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_server_ca (google.cloud.memorystore_v1beta.types.SharedRegionalCertificateAuthority.RegionalManagedCertificateAuthority): + CA certificate chains for memorystore managed + server authentication. + + This field is a member of `oneof`_ ``server_ca``. + name (str): + Identifier. Unique name of the resource in this scope + including project and location using the form: + ``projects/{project}/locations/{location}/sharedRegionalCertificateAuthority`` + """ + + class RegionalManagedCertificateAuthority(proto.Message): + r"""CA certificate chains for memorystore managed server + authentication. 
+ + Attributes: + ca_certs (MutableSequence[google.cloud.memorystore_v1beta.types.SharedRegionalCertificateAuthority.RegionalManagedCertificateAuthority.RegionalCertChain]): + The PEM encoded CA certificate chains for + memorystore managed server authentication + """ + + class RegionalCertChain(proto.Message): + r"""The certificates that form the CA chain, from leaf to root + order. + + Attributes: + certificates (MutableSequence[str]): + The certificates that form the CA chain, from + leaf to root order. + """ + + certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + ca_certs: MutableSequence[ + "SharedRegionalCertificateAuthority.RegionalManagedCertificateAuthority.RegionalCertChain" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SharedRegionalCertificateAuthority.RegionalManagedCertificateAuthority.RegionalCertChain", + ) + + managed_server_ca: RegionalManagedCertificateAuthority = proto.Field( + proto.MESSAGE, + number=2, + oneof="server_ca", + message=RegionalManagedCertificateAuthority, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetSharedRegionalCertificateAuthorityRequest(proto.Message): + r"""Request for + [GetSharedRegionalCertificateAuthority][google.cloud.memorystore.v1beta.Memorystore.GetSharedRegionalCertificateAuthority]. + + Attributes: + name (str): + Required. Regional certificate authority resource name using + the form: + ``projects/{project}/locations/{location}/sharedRegionalCertificateAuthority`` + where ``location_id`` refers to a Google Cloud region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. 
diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_shared_regional_certificate_authority_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_shared_regional_certificate_authority_sync.py new file mode 100644 index 000000000000..c7734f2f3d6a --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_shared_regional_certificate_authority_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSharedRegionalCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_GetSharedRegionalCertificateAuthority_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_get_shared_regional_certificate_authority(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetSharedRegionalCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_shared_regional_certificate_authority(request=request) + + # Handle the response + print(response) + + +# [END memorystore_v1beta_generated_Memorystore_GetSharedRegionalCertificateAuthority_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json index 317dea8769ee..39b10f86ecdd 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json +++ b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json @@ -339,6 +339,86 @@ ], "title": "memorystore_v1beta_generated_memorystore_get_instance_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.get_shared_regional_certificate_authority", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.GetSharedRegionalCertificateAuthority", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetSharedRegionalCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.memorystore_v1beta.types.GetSharedRegionalCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.memorystore_v1beta.types.SharedRegionalCertificateAuthority", + "shortName": "get_shared_regional_certificate_authority" + }, + "description": "Sample for GetSharedRegionalCertificateAuthority", + "file": "memorystore_v1beta_generated_memorystore_get_shared_regional_certificate_authority_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_GetSharedRegionalCertificateAuthority_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_get_shared_regional_certificate_authority_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py index 683ac15482af..9d4a7778253f 100644 --- a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py @@ -2345,6 +2345,203 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = "rest") ) +def test_get_shared_regional_certificate_authority_rest_use_cached_wrapped_rpc(): + # 
Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_shared_regional_certificate_authority + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_shared_regional_certificate_authority + ] = mock_rpc + + request = {} + client.get_shared_regional_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_shared_regional_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_shared_regional_certificate_authority_rest_required_fields( + request_type=memorystore.GetSharedRegionalCertificateAuthorityRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_shared_regional_certificate_authority._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_shared_regional_certificate_authority._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.SharedRegionalCertificateAuthority() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.SharedRegionalCertificateAuthority.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_shared_regional_certificate_authority(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_shared_regional_certificate_authority_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_shared_regional_certificate_authority._get_unset_required_fields( + {} + ) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_shared_regional_certificate_authority_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.SharedRegionalCertificateAuthority() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/sharedRegionalCertificateAuthority" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.SharedRegionalCertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_shared_regional_certificate_authority(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/sharedRegionalCertificateAuthority}" + % client.transport._host, + args[1], + ) + + +def test_get_shared_regional_certificate_authority_rest_flattened_error( + transport: str = "rest", +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_shared_regional_certificate_authority( + memorystore.GetSharedRegionalCertificateAuthorityRequest(), + name="name_value", + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.MemorystoreRestTransport( @@ -2614,6 +2811,9 @@ def test_get_instance_rest_call_success(request_type): engine_version="engine_version_value", deletion_protection_enabled=True, mode=memorystore.Instance.Mode.STANDALONE, + server_ca_mode=memorystore.Instance.ServerCaMode.GOOGLE_MANAGED_PER_INSTANCE_CA, + server_ca_pool="server_ca_pool_value", + rotate_server_certificate=True, ) # Wrap the value into a proper Response obj @@ -2647,6 +2847,12 @@ def test_get_instance_rest_call_success(request_type): assert response.engine_version == "engine_version_value" assert response.deletion_protection_enabled is True assert response.mode == memorystore.Instance.Mode.STANDALONE + assert ( + response.server_ca_mode + == memorystore.Instance.ServerCaMode.GOOGLE_MANAGED_PER_INSTANCE_CA + ) + assert response.server_ca_pool == "server_ca_pool_value" + assert response.rotate_server_certificate is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -2812,6 +3018,9 @@ def test_create_instance_rest_call_success(request_type): } ], "mode": 1, + "server_ca_mode": 1, + "server_ca_pool": "server_ca_pool_value", + "rotate_server_certificate": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -3070,6 +3279,9 @@ def test_update_instance_rest_call_success(request_type): } ], "mode": 1, + "server_ca_mode": 1, + "server_ca_pool": "server_ca_pool_value", + "rotate_server_certificate": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -3481,6 +3693,148 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_shared_regional_certificate_authority_rest_bad_request( + request_type=memorystore.GetSharedRegionalCertificateAuthorityRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/sharedRegionalCertificateAuthority" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_shared_regional_certificate_authority(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetSharedRegionalCertificateAuthorityRequest, + dict, + ], +) +def test_get_shared_regional_certificate_authority_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/sharedRegionalCertificateAuthority" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.SharedRegionalCertificateAuthority( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.SharedRegionalCertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_shared_regional_certificate_authority(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, memorystore.SharedRegionalCertificateAuthority) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_shared_regional_certificate_authority_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.MemorystoreRestInterceptor, + "post_get_shared_regional_certificate_authority", + ) as post, + mock.patch.object( + transports.MemorystoreRestInterceptor, + "post_get_shared_regional_certificate_authority_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.MemorystoreRestInterceptor, + "pre_get_shared_regional_certificate_authority", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.GetSharedRegionalCertificateAuthorityRequest.pb( + 
memorystore.GetSharedRegionalCertificateAuthorityRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = memorystore.SharedRegionalCertificateAuthority.to_json( + memorystore.SharedRegionalCertificateAuthority() + ) + req.return_value.content = return_value + + request = memorystore.GetSharedRegionalCertificateAuthorityRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.SharedRegionalCertificateAuthority() + post_with_metadata.return_value = ( + memorystore.SharedRegionalCertificateAuthority(), + metadata, + ) + + client.get_shared_regional_certificate_authority( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3984,6 +4338,28 @@ def test_get_certificate_authority_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_shared_regional_certificate_authority_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_shared_regional_certificate_authority), "__call__" + ) as call: + client.get_shared_regional_certificate_authority(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetSharedRegionalCertificateAuthorityRequest() + + assert args[0] == request_msg + + def test_memorystore_rest_lro_client(): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4029,6 +4405,7 @@ def test_memorystore_base_transport(): "update_instance", "delete_instance", "get_certificate_authority", + "get_shared_regional_certificate_authority", "get_location", "list_locations", "get_operation", @@ -4195,12 +4572,41 @@ def test_memorystore_client_transport_session_collision(transport_name): session1 = client1.transport.get_certificate_authority._session session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + session1 = client1.transport.get_shared_regional_certificate_authority._session + session2 = client2.transport.get_shared_regional_certificate_authority._session + assert session1 != session2 -def test_certificate_authority_path(): +def test_ca_pool_path(): project = "squid" location = "clam" - instance = "whelk" + ca_pool = "whelk" + expected = "projects/{project}/locations/{location}/caPools/{ca_pool}".format( + project=project, + location=location, + ca_pool=ca_pool, + ) + actual = MemorystoreClient.ca_pool_path(project, location, ca_pool) + assert expected == actual + + +def test_parse_ca_pool_path(): + expected = { + "project": "octopus", + "location": "oyster", + "ca_pool": "nudibranch", + } + path = MemorystoreClient.ca_pool_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MemorystoreClient.parse_ca_pool_path(path) + assert expected == actual + + +def test_certificate_authority_path(): + project = "cuttlefish" + location = "mussel" + instance = "winkle" expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( project=project, location=location, @@ -4212,9 +4618,9 @@ def test_certificate_authority_path(): def test_parse_certificate_authority_path(): expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", + "project": "nautilus", + "location": "scallop", + "instance": "abalone", } path = MemorystoreClient.certificate_authority_path(**expected) @@ -4224,9 +4630,9 @@ def test_parse_certificate_authority_path(): def test_forwarding_rule_path(): - project = "cuttlefish" - region = "mussel" - forwarding_rule = "winkle" + project = "squid" + region = "clam" + forwarding_rule = "whelk" expected = ( "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( project=project, @@ -4240,9 +4646,9 @@ def test_forwarding_rule_path(): def test_parse_forwarding_rule_path(): expected = { - "project": "nautilus", - "region": "scallop", - "forwarding_rule": "abalone", + "project": "octopus", + "region": "oyster", + "forwarding_rule": "nudibranch", } path = MemorystoreClient.forwarding_rule_path(**expected) @@ -4252,9 +4658,9 @@ def test_parse_forwarding_rule_path(): def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" + project = "cuttlefish" + location = "mussel" + instance = "winkle" expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, @@ -4266,9 +4672,9 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", + "project": "nautilus", + "location": "scallop", + "instance": "abalone", } path = MemorystoreClient.instance_path(**expected) @@ -4278,8 +4684,8 
@@ def test_parse_instance_path(): def test_network_path(): - project = "cuttlefish" - network = "mussel" + project = "squid" + network = "clam" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -4290,8 +4696,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "winkle", - "network": "nautilus", + "project": "whelk", + "network": "octopus", } path = MemorystoreClient.network_path(**expected) @@ -4301,9 +4707,9 @@ def test_parse_network_path(): def test_service_attachment_path(): - project = "scallop" - region = "abalone" - service_attachment = "squid" + project = "oyster" + region = "nudibranch" + service_attachment = "cuttlefish" expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( project=project, region=region, @@ -4317,9 +4723,9 @@ def test_service_attachment_path(): def test_parse_service_attachment_path(): expected = { - "project": "clam", - "region": "whelk", - "service_attachment": "octopus", + "project": "mussel", + "region": "winkle", + "service_attachment": "nautilus", } path = MemorystoreClient.service_attachment_path(**expected) @@ -4328,8 +4734,33 @@ def test_parse_service_attachment_path(): assert expected == actual +def test_shared_regional_certificate_authority_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}/sharedRegionalCertificateAuthority".format( + project=project, + location=location, + ) + actual = MemorystoreClient.shared_regional_certificate_authority_path( + project, location + ) + assert expected == actual + + +def test_parse_shared_regional_certificate_authority_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = MemorystoreClient.shared_regional_certificate_authority_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MemorystoreClient.parse_shared_regional_certificate_authority_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4339,7 +4770,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "octopus", } path = MemorystoreClient.common_billing_account_path(**expected) @@ -4349,7 +4780,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -4359,7 +4790,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nudibranch", } path = MemorystoreClient.common_folder_path(**expected) @@ -4369,7 +4800,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -4379,7 +4810,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "mussel", } path = MemorystoreClient.common_organization_path(**expected) @@ -4389,7 +4820,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -4399,7 +4830,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nautilus", } path = MemorystoreClient.common_project_path(**expected) @@ -4409,8 +4840,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "scallop" 
+ location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4421,8 +4852,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "squid", + "location": "clam", } path = MemorystoreClient.common_location_path(**expected) diff --git a/packages/google-cloud-migrationcenter/.repo-metadata.json b/packages/google-cloud-migrationcenter/.repo-metadata.json index 30684fe6602b..c6073959d41c 100644 --- a/packages/google-cloud-migrationcenter/.repo-metadata.json +++ b/packages/google-cloud-migrationcenter/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "A unified platform that helps you accelerate your end-to-end cloud journey from your current on-premises or cloud environments to Google Cloud.", - "api_id": "migrationcenter.googleapis.com", - "api_shortname": "migrationcenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/migrationcenter/latest", - "default_version": "v1", - "distribution_name": "google-cloud-migrationcenter", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "migrationcenter", - "name_pretty": "Migration Center API", - "product_documentation": "https://cloud.google.com/migration-center/docs/migration-center-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "A unified platform that helps you accelerate your end-to-end cloud journey from your current on-premises or cloud environments to Google Cloud.", + "api_id": "migrationcenter.googleapis.com", + "api_shortname": "migrationcenter", + "client_documentation": "https://cloud.google.com/python/docs/reference/migrationcenter/latest", + "default_version": "v1", + "distribution_name": "google-cloud-migrationcenter", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "migrationcenter", + "name_pretty": "Migration Center API", + "product_documentation": "https://cloud.google.com/migration-center/docs/migration-center-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-modelarmor/.repo-metadata.json b/packages/google-cloud-modelarmor/.repo-metadata.json index 650581bcf98a..ca5dc54bfb35 100644 --- a/packages/google-cloud-modelarmor/.repo-metadata.json +++ b/packages/google-cloud-modelarmor/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Model Armor helps you protect against risks like prompt injection, harmful content, and data leakage in generative AI applications by letting you define policies that filter user prompts and model responses.", - "api_id": "securitycenter.googleapis.com", - "api_shortname": "securitycenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest", - "default_version": "v1", - "distribution_name": "google-cloud-modelarmor", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1514910&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-modelarmor", - "name_pretty": "Model Armor API", - "product_documentation": "https://cloud.google.com/security-command-center/docs/model-armor-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Model Armor helps you protect against risks like prompt injection, harmful content, and data leakage in generative AI applications by letting you define policies that filter user prompts and model responses.", + "api_id": "securitycenter.googleapis.com", + "api_shortname": "securitycenter", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest", + "default_version": "v1", + "distribution_name": "google-cloud-modelarmor", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1514910\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-modelarmor", + "name_pretty": "Model Armor API", + "product_documentation": "https://cloud.google.com/security-command-center/docs/model-armor-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-monitoring-dashboards/.repo-metadata.json b/packages/google-cloud-monitoring-dashboards/.repo-metadata.json index b9f6ca28e4f0..b6a5e1d8439e 100644 --- a/packages/google-cloud-monitoring-dashboards/.repo-metadata.json +++ b/packages/google-cloud-monitoring-dashboards/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "are one way for you to view and analyze metric data. The Cloud Console provides predefined dashboards that require no setup or configuration. You can also define custom dashboards. With custom dashboards, you have complete control over the charts that are displayed and their configuration.", - "api_id": "monitoring.googleapis.com", - "api_shortname": "monitoring", - "client_documentation": "https://cloud.google.com/python/docs/reference/monitoring-dashboards/latest", - "default_version": "v1", - "distribution_name": "google-cloud-monitoring-dashboards", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "monitoring-dashboards", - "name_pretty": "Monitoring Dashboards", - "product_documentation": "https://cloud.google.com/monitoring/dashboards/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "are one way for you to view and analyze metric data. 
The Cloud Console provides predefined dashboards that require no setup or configuration. You can also define custom dashboards. With custom dashboards, you have complete control over the charts that are displayed and their configuration.", + "api_id": "monitoring.googleapis.com", + "api_shortname": "monitoring", + "client_documentation": "https://cloud.google.com/python/docs/reference/monitoring-dashboards/latest", + "default_version": "v1", + "distribution_name": "google-cloud-monitoring-dashboards", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "monitoring-dashboards", + "name_pretty": "Monitoring Dashboards", + "product_documentation": "https://cloud.google.com/monitoring/dashboards/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-monitoring-dashboards/docs/index.rst b/packages/google-cloud-monitoring-dashboards/docs/index.rst index 8980aded87fb..33689ea38185 100644 --- a/packages/google-cloud-monitoring-dashboards/docs/index.rst +++ b/packages/google-cloud-monitoring-dashboards/docs/index.rst @@ -8,8 +8,8 @@ API Reference .. 
toctree:: :maxdepth: 2 - monitoring_dashboard_v1/services_ - monitoring_dashboard_v1/types_ + dashboard_v1/services_ + dashboard_v1/types_ Changelog diff --git a/packages/google-cloud-monitoring-metrics-scopes/.repo-metadata.json b/packages/google-cloud-monitoring-metrics-scopes/.repo-metadata.json index 9285fef980e6..2a95a0d12afc 100644 --- a/packages/google-cloud-monitoring-metrics-scopes/.repo-metadata.json +++ b/packages/google-cloud-monitoring-metrics-scopes/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Manages your Cloud Monitoring data and configurations.", - "api_id": "monitoring.googleapis.com", - "api_shortname": "monitoring", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-monitoring-metrics-scopes/latest", - "default_version": "v1", - "distribution_name": "google-cloud-monitoring-metrics-scopes", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-monitoring-metrics-scopes", - "name_pretty": "Metrics Scopes", - "product_documentation": "https://cloud.google.com/monitoring/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Manages your Cloud Monitoring data and configurations.", + "api_id": "monitoring.googleapis.com", + "api_shortname": "monitoring", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-monitoring-metrics-scopes/latest", + "default_version": "v1", + "distribution_name": "google-cloud-monitoring-metrics-scopes", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-monitoring-metrics-scopes", + "name_pretty": "Metrics Scopes", + "product_documentation": "https://cloud.google.com/monitoring/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff 
--git a/packages/google-cloud-monitoring/.repo-metadata.json b/packages/google-cloud-monitoring/.repo-metadata.json index eaea0cff6290..e96bc72ce19a 100644 --- a/packages/google-cloud-monitoring/.repo-metadata.json +++ b/packages/google-cloud-monitoring/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "collects metrics, events, and metadata from Google Cloud, Amazon Web Services (AWS), hosted uptime probes, and application instrumentation. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premise systems, and hybrid cloud systems. Stackdriver ingests that data and generates insights via dashboards, charts, and alerts. BindPlane is included with your Google Cloud project at no additional cost.", - "api_id": "monitoring.googleapis.com", - "api_shortname": "monitoring", - "client_documentation": "https://cloud.google.com/python/docs/reference/monitoring/latest", - "default_version": "v3", - "distribution_name": "google-cloud-monitoring", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "monitoring", - "name_pretty": "Stackdriver Monitoring", - "product_documentation": "https://cloud.google.com/monitoring/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "collects metrics, events, and metadata from Google Cloud, Amazon Web Services (AWS), hosted uptime probes, and application instrumentation. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premise systems, and hybrid cloud systems. Stackdriver ingests that data and generates insights via dashboards, charts, and alerts. 
BindPlane is included with your Google Cloud project at no additional cost.", + "api_id": "monitoring.googleapis.com", + "api_shortname": "monitoring", + "client_documentation": "https://cloud.google.com/python/docs/reference/monitoring/latest", + "default_version": "v3", + "distribution_name": "google-cloud-monitoring", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559785", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "monitoring", + "name_pretty": "Stackdriver Monitoring", + "product_documentation": "https://cloud.google.com/monitoring/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-ndb/.repo-metadata.json b/packages/google-cloud-ndb/.repo-metadata.json index c0e7031d4a82..e3073cad2e73 100644 --- a/packages/google-cloud-ndb/.repo-metadata.json +++ b/packages/google-cloud-ndb/.repo-metadata.json @@ -1,14 +1,12 @@ { - "name": "python-ndb", - "name_pretty": "NDB Client Library for Google Cloud Datastore", + "api_shortname": "datastore", "client_documentation": "https://googleapis.dev/python/python-ndb/latest", + "distribution_name": "google-cloud-ndb", "issue_tracker": "https://github.com/googleapis/python-ndb/issues", - "release_level": "stable", "language": "python", "library_type": "GAPIC_MANUAL", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-ndb", - "default_version": "", - "codeowner_team": "@googleapis/firestore-dpe @googleapis/gcs-sdk-team", - "api_shortname": "datastore" -} + "name": "python-ndb", + "name_pretty": "NDB Client Library for Google Cloud Datastore", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-netapp/.repo-metadata.json b/packages/google-cloud-netapp/.repo-metadata.json index 341870461b53..60466bc5db7b 100644 --- a/packages/google-cloud-netapp/.repo-metadata.json +++ 
b/packages/google-cloud-netapp/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "NetApp API", - "api_id": "netapp.googleapis.com", - "api_shortname": "netapp", - "client_documentation": "https://cloud.google.com/python/docs/reference/netapp/latest", - "default_version": "v1", - "distribution_name": "google-cloud-netapp", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1144971", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "netapp", - "name_pretty": "NetApp API", - "product_documentation": "https://cloud.google.com/netapp/volumes/docs/discover/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "NetApp API", + "api_id": "netapp.googleapis.com", + "api_shortname": "netapp", + "client_documentation": "https://cloud.google.com/python/docs/reference/netapp/latest", + "default_version": "v1", + "distribution_name": "google-cloud-netapp", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1144971", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "netapp", + "name_pretty": "NetApp API", + "product_documentation": "https://cloud.google.com/netapp/volumes/docs/discover/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-network-connectivity/.repo-metadata.json b/packages/google-cloud-network-connectivity/.repo-metadata.json index af05239b2de0..5161754606d3 100644 --- a/packages/google-cloud-network-connectivity/.repo-metadata.json +++ b/packages/google-cloud-network-connectivity/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "The Network Connectivity API will be home to various services which provide information pertaining to network connectivity. This includes information like interconnects, VPNs, VPCs, routing information, ip address details, etc. 
This information will help customers verify their network configurations and helps them to discover misconfigurations, inconsistencies, etc.", - "api_id": "networkconnectivity.googleapis.com", - "api_shortname": "networkconnectivity", - "client_documentation": "https://cloud.google.com/python/docs/reference/networkconnectivity/latest", - "default_version": "v1", - "distribution_name": "google-cloud-network-connectivity", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "networkconnectivity", - "name_pretty": "Network Connectivity Center", - "product_documentation": "https://cloud.google.com/network-connectivity/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "The Network Connectivity API will be home to various services which provide information pertaining to network connectivity. This includes information like interconnects, VPNs, VPCs, routing information, ip address details, etc. This information will help customers verify their network configurations and helps them to discover misconfigurations, inconsistencies, etc.", + "api_id": "networkconnectivity.googleapis.com", + "api_shortname": "networkconnectivity", + "client_documentation": "https://cloud.google.com/python/docs/reference/networkconnectivity/latest", + "default_version": "v1", + "distribution_name": "google-cloud-network-connectivity", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "networkconnectivity", + "name_pretty": "Network Connectivity Center", + "product_documentation": "https://cloud.google.com/network-connectivity/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-network-management/.repo-metadata.json b/packages/google-cloud-network-management/.repo-metadata.json index 71951760e2a2..bb29e6e2f6b7 100644 --- a/packages/google-cloud-network-management/.repo-metadata.json +++ 
b/packages/google-cloud-network-management/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides a collection of network performance monitoring and diagnostic capabilities.", - "api_id": "networkmanagement.googleapis.com", - "api_shortname": "networkmanagement", - "client_documentation": "https://cloud.google.com/python/docs/reference/networkmanagement/latest", - "default_version": "v1", - "distribution_name": "google-cloud-network-management", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "networkmanagement", - "name_pretty": "Network Management", - "product_documentation": "https://cloud.google.com/network-management", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides a collection of network performance monitoring and diagnostic capabilities.", + "api_id": "networkmanagement.googleapis.com", + "api_shortname": "networkmanagement", + "client_documentation": "https://cloud.google.com/python/docs/reference/networkmanagement/latest", + "default_version": "v1", + "distribution_name": "google-cloud-network-management", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "networkmanagement", + "name_pretty": "Network Management", + "product_documentation": "https://cloud.google.com/network-management", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-network-security/.repo-metadata.json b/packages/google-cloud-network-security/.repo-metadata.json index bf9be45ec2a9..b8c143f2ee01 100644 --- a/packages/google-cloud-network-security/.repo-metadata.json +++ b/packages/google-cloud-network-security/.repo-metadata.json @@ -1,15 +1,15 @@ { - "api_id": "networksecurity.googleapis.com", - "api_shortname": "networksecurity", - "client_documentation": "https://cloud.google.com/python/docs/reference/networksecurity/latest", - "default_version": "v1", - 
"distribution_name": "google-cloud-network-security", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "networksecurity", - "name_pretty": "Network Security", - "product_documentation": "https://cloud.google.com/traffic-director/docs/reference/network-security/rest", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "networksecurity.googleapis.com", + "api_shortname": "networksecurity", + "client_documentation": "https://cloud.google.com/python/docs/reference/networksecurity/latest", + "default_version": "v1", + "distribution_name": "google-cloud-network-security", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1132189\u0026template=1639113", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "networksecurity", + "name_pretty": "Network Security", + "product_documentation": "https://cloud.google.com/traffic-director/docs/reference/network-security/rest", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-network-services/.repo-metadata.json b/packages/google-cloud-network-services/.repo-metadata.json index 676b400e05da..19b7545ce7cf 100644 --- a/packages/google-cloud-network-services/.repo-metadata.json +++ b/packages/google-cloud-network-services/.repo-metadata.json @@ -1,15 +1,15 @@ { - "api_id": "networkservices.googleapis.com", - "api_shortname": "networkservices", - "client_documentation": "https://cloud.google.com/python/docs/reference/networkservices/latest", - "default_version": "v1", - "distribution_name": "google-cloud-network-services", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "networkservices", - "name_pretty": "Network Services", - "product_documentation": "https://cloud.google.com", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "networkservices.googleapis.com", + 
"api_shortname": "networkservices", + "client_documentation": "https://cloud.google.com/python/docs/reference/networkservices/latest", + "default_version": "v1", + "distribution_name": "google-cloud-network-services", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1132189\u0026template=1639113", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "networkservices", + "name_pretty": "Network Services", + "product_documentation": "https://cloud.google.com", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-notebooks/.repo-metadata.json b/packages/google-cloud-notebooks/.repo-metadata.json index de90f3faed31..9bc558f43884 100644 --- a/packages/google-cloud-notebooks/.repo-metadata.json +++ b/packages/google-cloud-notebooks/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "is a managed service that offers an integrated and secure JupyterLab environment for data scientists and machine learning developers to experiment, develop, and deploy models into production. 
Users can create instances running JupyterLab that come pre-installed with the latest data science and machine learning frameworks in a single click.", - "api_id": "notebooks.googleapis.com", - "api_shortname": "notebooks", - "client_documentation": "https://cloud.google.com/python/docs/reference/notebooks/latest", - "default_version": "v1", - "distribution_name": "google-cloud-notebooks", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "notebooks", - "name_pretty": "AI Platform Notebooks", - "product_documentation": "https://cloud.google.com/ai-platform/notebooks/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a managed service that offers an integrated and secure JupyterLab environment for data scientists and machine learning developers to experiment, develop, and deploy models into production. Users can create instances running JupyterLab that come pre-installed with the latest data science and machine learning frameworks in a single click.", + "api_id": "notebooks.googleapis.com", + "api_shortname": "notebooks", + "client_documentation": "https://cloud.google.com/python/docs/reference/notebooks/latest", + "default_version": "v1", + "distribution_name": "google-cloud-notebooks", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1392625", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "notebooks", + "name_pretty": "AI Platform Notebooks", + "product_documentation": "https://cloud.google.com/ai-platform/notebooks/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-optimization/.repo-metadata.json b/packages/google-cloud-optimization/.repo-metadata.json index 5d665ef0fb22..a2d4ac53f70c 100644 --- a/packages/google-cloud-optimization/.repo-metadata.json +++ b/packages/google-cloud-optimization/.repo-metadata.json @@ -1,16 +1,15 @@ { - 
"api_description": "is a managed routing service that takes your list of orders, vehicles, constraints, and objectives and returns the most efficient plan for your entire fleet in near real-time.", - "api_id": "cloudoptimization.googleapis.com", - "api_shortname": "cloudoptimization", - "client_documentation": "https://cloud.google.com/python/docs/reference/optimization/latest", - "default_version": "v1", - "distribution_name": "google-cloud-optimization", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "optimization", - "name_pretty": "Cloud Optimization", - "product_documentation": "https://cloud.google.com/optimization/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a managed routing service that takes your list of orders, vehicles, constraints, and objectives and returns the most efficient plan for your entire fleet in near real-time.", + "api_id": "cloudoptimization.googleapis.com", + "api_shortname": "cloudoptimization", + "client_documentation": "https://cloud.google.com/python/docs/reference/optimization/latest", + "default_version": "v1", + "distribution_name": "google-cloud-optimization", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "optimization", + "name_pretty": "Cloud Optimization", + "product_documentation": "https://cloud.google.com/optimization/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-oracledatabase/.repo-metadata.json b/packages/google-cloud-oracledatabase/.repo-metadata.json index 1aa0ba283387..b8618584e483 100644 --- a/packages/google-cloud-oracledatabase/.repo-metadata.json +++ b/packages/google-cloud-oracledatabase/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases.", - "api_id": 
"oracledatabase.googleapis.com", - "api_shortname": "oracledatabase", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", - "default_version": "v1", - "distribution_name": "google-cloud-oracledatabase", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1492565", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-oracledatabase", - "name_pretty": "Oracle Database@Google Cloud API", - "product_documentation": "https://cloud.google.com/oracle/database/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Oracle Database@Google Cloud API provides a set of APIs to manage Oracle database services, such as Exadata and Autonomous Databases.", + "api_id": "oracledatabase.googleapis.com", + "api_shortname": "oracledatabase", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-oracledatabase/latest", + "default_version": "v1", + "distribution_name": "google-cloud-oracledatabase", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1492565", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-oracledatabase", + "name_pretty": "Oracle Database@Google Cloud API", + "product_documentation": "https://cloud.google.com/oracle/database/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-orchestration-airflow/.repo-metadata.json b/packages/google-cloud-orchestration-airflow/.repo-metadata.json index aa162f298436..38c5dce6e76b 100644 --- a/packages/google-cloud-orchestration-airflow/.repo-metadata.json +++ b/packages/google-cloud-orchestration-airflow/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a managed Apache Airflow service that helps you create, schedule, monitor and manage workflows. 
Cloud Composer automation helps you create Airflow environments quickly and use Airflow-native tools, such as the powerful Airflow web interface and command line tools, so you can focus on your workflows and not your infrastructure.", - "api_id": "composer.googleapis.com", - "api_shortname": "composer", - "client_documentation": "https://cloud.google.com/python/docs/reference/composer/latest", - "default_version": "v1", - "distribution_name": "google-cloud-orchestration-airflow", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "composer", - "name_pretty": "Cloud Composer", - "product_documentation": "https://cloud.google.com/composer/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a managed Apache Airflow service that helps you create, schedule, monitor and manage workflows. Cloud Composer automation helps you create Airflow environments quickly and use Airflow-native tools, such as the powerful Airflow web interface and command line tools, so you can focus on your workflows and not your infrastructure.", + "api_id": "composer.googleapis.com", + "api_shortname": "composer", + "client_documentation": "https://cloud.google.com/python/docs/reference/composer/latest", + "default_version": "v1", + "distribution_name": "google-cloud-orchestration-airflow", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "composer", + "name_pretty": "Cloud Composer", + "product_documentation": "https://cloud.google.com/composer/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-org-policy/.repo-metadata.json b/packages/google-cloud-org-policy/.repo-metadata.json index f7ec4ea33934..51306f3c25ff 100644 --- a/packages/google-cloud-org-policy/.repo-metadata.json +++ b/packages/google-cloud-org-policy/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "The Organization Policy API allows 
users to configure governance rules on their GCP resources across the Cloud Resource Hierarchy.", - "api_id": "orgpolicy.googleapis.com", - "api_shortname": "orgpolicy", - "client_documentation": "https://cloud.google.com/python/docs/reference/orgpolicy/latest", - "default_version": "v2", - "distribution_name": "google-cloud-org-policy", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "orgpolicy", - "name_pretty": "Organization Policy", - "product_documentation": "https://cloud.google.com/resource-manager/docs/organization-policy/overview", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "The Organization Policy API allows users to configure governance rules on their GCP resources across the Cloud Resource Hierarchy.", + "api_id": "orgpolicy.googleapis.com", + "api_shortname": "orgpolicy", + "client_documentation": "https://cloud.google.com/python/docs/reference/orgpolicy/latest", + "default_version": "v2", + "distribution_name": "google-cloud-org-policy", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "orgpolicy", + "name_pretty": "Organization Policy", + "product_documentation": "https://cloud.google.com/resource-manager/docs/organization-policy/overview", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-os-config/.repo-metadata.json b/packages/google-cloud-os-config/.repo-metadata.json index 91bb8e2d89f4..4b840a462160 100644 --- a/packages/google-cloud-os-config/.repo-metadata.json +++ b/packages/google-cloud-os-config/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides OS management tools that can be used for patch management, patch compliance, and configuration management on VM instances.", - "api_id": "osconfig.googleapis.com", - "api_shortname": "osconfig", - "client_documentation": "https://cloud.google.com/python/docs/reference/osconfig/latest", - 
"default_version": "v1", - "distribution_name": "google-cloud-os-config", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "osconfig", - "name_pretty": "OS Config", - "product_documentation": "https://cloud.google.com/compute/docs/manage-os", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides OS management tools that can be used for patch management, patch compliance, and configuration management on VM instances.", + "api_id": "osconfig.googleapis.com", + "api_shortname": "osconfig", + "client_documentation": "https://cloud.google.com/python/docs/reference/osconfig/latest", + "default_version": "v1", + "distribution_name": "google-cloud-os-config", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "osconfig", + "name_pretty": "OS Config", + "product_documentation": "https://cloud.google.com/compute/docs/manage-os", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-os-login/.repo-metadata.json b/packages/google-cloud-os-login/.repo-metadata.json index 6eaca9fedd8c..44073acf1f41 100644 --- a/packages/google-cloud-os-login/.repo-metadata.json +++ b/packages/google-cloud-os-login/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_id": "oslogin.googleapis.com", - "api_shortname": "oslogin", - "client_documentation": "https://cloud.google.com/python/docs/reference/oslogin/latest", - "default_version": "v1", - "distribution_name": "google-cloud-os-login", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559755", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "oslogin", - "name_pretty": "Google Cloud OS Login", - "product_documentation": "https://cloud.google.com/compute/docs/oslogin/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "You can use OS Login to manage access to 
your VM instances using IAM roles.", + "api_id": "oslogin.googleapis.com", + "api_shortname": "oslogin", + "client_documentation": "https://cloud.google.com/python/docs/reference/oslogin/latest", + "default_version": "v1", + "distribution_name": "google-cloud-os-login", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559755", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "oslogin", + "name_pretty": "Google Cloud OS Login", + "product_documentation": "https://cloud.google.com/compute/docs/oslogin/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-os-login/README.rst b/packages/google-cloud-os-login/README.rst index 40b0f34aba05..ceb9ef4baad8 100644 --- a/packages/google-cloud-os-login/README.rst +++ b/packages/google-cloud-os-login/README.rst @@ -3,7 +3,7 @@ Python Client for Google Cloud OS Login |stable| |pypi| |versions| -`Google Cloud OS Login`_: +`Google Cloud OS Login`_: You can use OS Login to manage access to your VM instances using IAM roles. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-os-login/docs/README.rst b/packages/google-cloud-os-login/docs/README.rst index 40b0f34aba05..ceb9ef4baad8 100644 --- a/packages/google-cloud-os-login/docs/README.rst +++ b/packages/google-cloud-os-login/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Google Cloud OS Login |stable| |pypi| |versions| -`Google Cloud OS Login`_: +`Google Cloud OS Login`_: You can use OS Login to manage access to your VM instances using IAM roles. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py index 29f20cbd3a5a..2c20d0223743 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/async_client.py @@ -45,8 +45,8 @@ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -from google.cloud.oslogin_v1.common.types import common +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .client import OsLoginServiceClient diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py index 660bbe83725b..353bde94142a 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/client.py @@ -62,8 +62,8 @@ _LOGGER = std_logging.getLogger(__name__) import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -from google.cloud.oslogin_v1.common.types import common +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .transports.base import DEFAULT_CLIENT_INFO, OsLoginServiceTransport diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/base.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/base.py index f853cf10157b..86fdbf553c87 100644 --- 
a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/base.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/base.py @@ -24,10 +24,10 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.oslogin_v1.common.types import common from google.oauth2 import service_account # type: ignore from google.cloud.oslogin_v1 import gapic_version as package_version +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc.py index ef2b8a392bc7..c3ba1f6d689e 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc.py @@ -27,9 +27,9 @@ from google.api_core import gapic_v1, grpc_helpers from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.oslogin_v1.common.types import common from google.protobuf.json_format import MessageToJson +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .base import DEFAULT_CLIENT_INFO, OsLoginServiceTransport diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc_asyncio.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc_asyncio.py index ace1a50aedf9..a33746ca33b5 100644 --- 
a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/grpc_asyncio.py @@ -29,10 +29,10 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.oslogin_v1.common.types import common from google.protobuf.json_format import MessageToJson from grpc.experimental import aio # type: ignore +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .base import DEFAULT_CLIENT_INFO, OsLoginServiceTransport diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest.py index 8d4a5f33a491..dbb126979a5a 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest.py @@ -26,10 +26,10 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.cloud.oslogin_v1.common.types import common from google.protobuf import json_format from requests import __version__ as requests_version +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest_base.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest_base.py index 
42e72a780b9d..33cbee45af67 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest_base.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/services/os_login_service/transports/rest_base.py @@ -19,9 +19,9 @@ import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import gapic_v1, path_template -from google.cloud.oslogin_v1.common.types import common from google.protobuf import json_format +from google.cloud.oslogin_v1.common.types import common from google.cloud.oslogin_v1.types import oslogin from .base import DEFAULT_CLIENT_INFO, OsLoginServiceTransport diff --git a/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py b/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py index 0a5ad23f2924..126e189de62a 100644 --- a/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py +++ b/packages/google-cloud-os-login/google/cloud/oslogin_v1/types/oslogin.py @@ -19,6 +19,7 @@ import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import proto # type: ignore + from google.cloud.oslogin_v1.common.types import common __protobuf__ = proto.module( diff --git a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py index bcff50f5f3ec..2aa92b2c69b8 100644 --- a/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py +++ b/packages/google-cloud-os-login/tests/unit/gapic/oslogin_v1/test_os_login_service.py @@ -56,9 +56,9 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.oslogin_v1.common.types import common from google.oauth2 import service_account +from google.cloud.oslogin_v1.common.types import common from 
google.cloud.oslogin_v1.services.os_login_service import ( OsLoginServiceAsyncClient, OsLoginServiceClient, diff --git a/packages/google-cloud-parallelstore/.repo-metadata.json b/packages/google-cloud-parallelstore/.repo-metadata.json index 5139f20bc63a..19f092bdde43 100644 --- a/packages/google-cloud-parallelstore/.repo-metadata.json +++ b/packages/google-cloud-parallelstore/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Parallelstore is based on Intel DAOS and delivers up to 6.3x greater read throughput performance compared to competitive Lustre scratch offerings.", - "api_id": "parallelstore.googleapis.com", - "api_shortname": "parallelstore", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-parallelstore/latest", - "default_version": "v1beta", - "distribution_name": "google-cloud-parallelstore", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-parallelstore", - "name_pretty": "Parallelstore API", - "product_documentation": "https://cloud.google.com/parallelstore", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Parallelstore is based on Intel DAOS and delivers up to 6.3x greater read throughput performance compared to competitive Lustre scratch offerings.", + "api_id": "parallelstore.googleapis.com", + "api_shortname": "parallelstore", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-parallelstore/latest", + "default_version": "v1beta", + "distribution_name": "google-cloud-parallelstore", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-parallelstore", + "name_pretty": "Parallelstore API", + "product_documentation": "https://cloud.google.com/parallelstore", + "release_level": "preview", + "repo": 
"googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-parametermanager/.repo-metadata.json b/packages/google-cloud-parametermanager/.repo-metadata.json index 6ea21b04edf3..e98199554823 100644 --- a/packages/google-cloud-parametermanager/.repo-metadata.json +++ b/packages/google-cloud-parametermanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "(Public Preview) Parameter Manager is a single source of truth to store, access and manage the lifecycle of your workload parameters. Parameter Manager aims to make management of sensitive application parameters effortless for customers without diminishing focus on security. ", - "api_id": "parametermanager.googleapis.com", - "api_shortname": "parametermanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-parametermanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-parametermanager", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1442085&template=2002674", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-parametermanager", - "name_pretty": "Parameter Manager API", - "product_documentation": "https://cloud.google.com/secret-manager/parameter-manager/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "(Public Preview) Parameter Manager is a single source of truth to store, access and manage the lifecycle of your workload parameters. Parameter Manager aims to make management of sensitive application parameters effortless for customers without diminishing focus on security. 
", + "api_id": "parametermanager.googleapis.com", + "api_shortname": "parametermanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-parametermanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-parametermanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1442085\u0026template=2002674", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-parametermanager", + "name_pretty": "Parameter Manager API", + "product_documentation": "https://cloud.google.com/secret-manager/parameter-manager/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-phishing-protection/.repo-metadata.json b/packages/google-cloud-phishing-protection/.repo-metadata.json index 416540e750e9..ce3c705c398b 100644 --- a/packages/google-cloud-phishing-protection/.repo-metadata.json +++ b/packages/google-cloud-phishing-protection/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "helps prevent users from accessing phishing sites by identifying various signals associated with malicious content, including the use of your brand assets, classifying malicious content that uses your brand and reporting the unsafe URLs to Google Safe Browsing. 
Once a site is propagated to Safe Browsing, users will see warnings across more than 4 billion devices.", - "api_id": "phishingprotection.googleapis.com", - "api_shortname": "phishingprotection", - "client_documentation": "https://cloud.google.com/python/docs/reference/phishingprotection/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-phishing-protection", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "phishingprotection", - "name_pretty": "Phishing Protection", - "product_documentation": "https://cloud.google.com/phishing-protection/docs/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "helps prevent users from accessing phishing sites by identifying various signals associated with malicious content, including the use of your brand assets, classifying malicious content that uses your brand and reporting the unsafe URLs to Google Safe Browsing. 
Once a site is propagated to Safe Browsing, users will see warnings across more than 4 billion devices.", + "api_id": "phishingprotection.googleapis.com", + "api_shortname": "phishingprotection", + "client_documentation": "https://cloud.google.com/python/docs/reference/phishingprotection/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-phishing-protection", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "phishingprotection", + "name_pretty": "Phishing Protection", + "product_documentation": "https://cloud.google.com/phishing-protection/docs/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-policy-troubleshooter/.repo-metadata.json b/packages/google-cloud-policy-troubleshooter/.repo-metadata.json index 9aaca7f34778..f4d8d4d77b55 100644 --- a/packages/google-cloud-policy-troubleshooter/.repo-metadata.json +++ b/packages/google-cloud-policy-troubleshooter/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "makes it easier to understand why a user has access to a resource or doesn't have permission to call an API. Given an email, resource, and permission, Policy Troubleshooter examines all Identity and Access Management (IAM) policies that apply to the resource. 
It then reveals whether the member's roles include the permission on that resource and, if so, which policies bind the member to those roles.", - "api_id": "policytroubleshooter.googleapis.com", - "api_shortname": "policytroubleshooter", - "client_documentation": "https://cloud.google.com/python/docs/reference/policytroubleshooter/latest", - "default_version": "v1", - "distribution_name": "google-cloud-policy-troubleshooter", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "policytroubleshooter", - "name_pretty": "IAM Policy Troubleshooter API", - "product_documentation": "https://cloud.google.com/iam/docs/troubleshooting-access#rest-api/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "makes it easier to understand why a user has access to a resource or doesn't have permission to call an API. Given an email, resource, and permission, Policy Troubleshooter examines all Identity and Access Management (IAM) policies that apply to the resource. 
It then reveals whether the member's roles include the permission on that resource and, if so, which policies bind the member to those roles.", + "api_id": "policytroubleshooter.googleapis.com", + "api_shortname": "policytroubleshooter", + "client_documentation": "https://cloud.google.com/python/docs/reference/policytroubleshooter/latest", + "default_version": "v1", + "distribution_name": "google-cloud-policy-troubleshooter", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=690790\u0026template=1814512", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "policytroubleshooter", + "name_pretty": "IAM Policy Troubleshooter API", + "product_documentation": "https://cloud.google.com/iam/docs/troubleshooting-access#rest-api/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-policysimulator/.repo-metadata.json b/packages/google-cloud-policysimulator/.repo-metadata.json index c382776cb073..c113fb959e42 100644 --- a/packages/google-cloud-policysimulator/.repo-metadata.json +++ b/packages/google-cloud-policysimulator/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Policy Simulator is a collection of endpoints for creating, running, and viewing a `Replay`. 
A `Replay` is a type of simulation that lets you see how your members' access to resources might change if you changed your IAM policy.", - "api_id": "policysimulator.googleapis.com", - "api_shortname": "policysimulator", - "client_documentation": "https://cloud.google.com/python/docs/reference/policysimulator/latest", - "default_version": "v1", - "distribution_name": "google-cloud-policysimulator", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "policysimulator", - "name_pretty": "Policy Simulator API", - "product_documentation": "https://cloud.google.com/policy-intelligence/docs/iam-simulator-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Policy Simulator is a collection of endpoints for creating, running, and viewing a `Replay`. A `Replay` is a type of simulation that lets you see how your members' access to resources might change if you changed your IAM policy.", + "api_id": "policysimulator.googleapis.com", + "api_shortname": "policysimulator", + "client_documentation": "https://cloud.google.com/python/docs/reference/policysimulator/latest", + "default_version": "v1", + "distribution_name": "google-cloud-policysimulator", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "policysimulator", + "name_pretty": "Policy Simulator API", + "product_documentation": "https://cloud.google.com/policy-intelligence/docs/iam-simulator-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-policytroubleshooter-iam/.repo-metadata.json b/packages/google-cloud-policytroubleshooter-iam/.repo-metadata.json index bf21d07276d0..e1b7fab5749c 100644 --- a/packages/google-cloud-policytroubleshooter-iam/.repo-metadata.json +++ 
b/packages/google-cloud-policytroubleshooter-iam/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "policytroubleshooter.googleapis.com", - "api_shortname": "iam", - "client_documentation": "https://cloud.google.com/python/docs/reference/policytroubleshooter-iam/latest", - "default_version": "v3", - "distribution_name": "google-cloud-policytroubleshooter-iam", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=690790&template=1814512", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "policytroubleshooter-iam", - "name_pretty": "Policy Troubleshooter API", - "product_documentation": "https://cloud.google.com/policy-intelligence/docs/troubleshoot-access", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_id": "policytroubleshooter.googleapis.com", + "api_shortname": "iam", + "client_documentation": "https://cloud.google.com/python/docs/reference/policytroubleshooter-iam/latest", + "default_version": "v3", + "distribution_name": "google-cloud-policytroubleshooter-iam", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=690790\u0026template=1814512", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "policytroubleshooter-iam", + "name_pretty": "Policy Troubleshooter API", + "product_documentation": "https://cloud.google.com/policy-intelligence/docs/troubleshoot-access", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/types/troubleshooter.py b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/types/troubleshooter.py index 56dfb07645d5..dc205e8a22ef 100644 --- a/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/types/troubleshooter.py +++ 
b/packages/google-cloud-policytroubleshooter-iam/google/cloud/policytroubleshooter_iam_v3/types/troubleshooter.py @@ -18,12 +18,12 @@ from typing import MutableMapping, MutableSequence import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore -from google.cloud.iam_v2 import Policy # type: ignore import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore import google.type.expr_pb2 as expr_pb2 # type: ignore import proto # type: ignore +from google.cloud.iam_v2 import Policy # type: ignore __protobuf__ = proto.module( package="google.cloud.policytroubleshooter.iam.v3", diff --git a/packages/google-cloud-private-ca/.repo-metadata.json b/packages/google-cloud-private-ca/.repo-metadata.json index 481380b688ac..3d9a40d36f9a 100644 --- a/packages/google-cloud-private-ca/.repo-metadata.json +++ b/packages/google-cloud-private-ca/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "simplifies the deployment and management of private CAs without managing infrastructure.", - "api_id": "privateca.googleapis.com", - "api_shortname": "privateca", - "client_documentation": "https://cloud.google.com/python/docs/reference/privateca/latest", - "default_version": "v1", - "distribution_name": "google-cloud-private-ca", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "privateca", - "name_pretty": "Private Certificate Authority", - "product_documentation": "https://cloud.google.com/certificate-authority-service", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "simplifies the deployment and management of private CAs without managing infrastructure.", + "api_id": "privateca.googleapis.com", + "api_shortname": "privateca", + "client_documentation": "https://cloud.google.com/python/docs/reference/privateca/latest", + "default_version": "v1", + 
"distribution_name": "google-cloud-private-ca", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "privateca", + "name_pretty": "Private Certificate Authority", + "product_documentation": "https://cloud.google.com/certificate-authority-service", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-private-catalog/.repo-metadata.json b/packages/google-cloud-private-catalog/.repo-metadata.json index 4a205c9a80ff..5b571c3e6521 100644 --- a/packages/google-cloud-private-catalog/.repo-metadata.json +++ b/packages/google-cloud-private-catalog/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows developers and cloud admins to make their solutions discoverable to their internal enterprise users. Cloud admins can manage their solutions and ensure their users are always launching the latest versions.", - "api_id": "cloudprivatecatalog.googleapis.com", - "api_shortname": "cloudprivatecatalog", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudprivatecatalog/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-private-catalog", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudprivatecatalog", - "name_pretty": "Private Catalog", - "product_documentation": "https://cloud.google.com/private-catalog/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "allows developers and cloud admins to make their solutions discoverable to their internal enterprise users. 
Cloud admins can manage their solutions and ensure their users are always launching the latest versions.", + "api_id": "cloudprivatecatalog.googleapis.com", + "api_shortname": "cloudprivatecatalog", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudprivatecatalog/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-private-catalog", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudprivatecatalog", + "name_pretty": "Private Catalog", + "product_documentation": "https://cloud.google.com/private-catalog/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-privilegedaccessmanager/.repo-metadata.json b/packages/google-cloud-privilegedaccessmanager/.repo-metadata.json index e4b53a849b04..a4572c13d2be 100644 --- a/packages/google-cloud-privilegedaccessmanager/.repo-metadata.json +++ b/packages/google-cloud-privilegedaccessmanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Privileged Access Manager (PAM) helps you on your journey towards least privilege and helps mitigate risks tied to privileged access misuse or abuse. PAM allows you to shift from always-on standing privileges towards on-demand access with just-in-time, time-bound, and approval-based access elevations. PAM allows IAM administrators to create entitlements that can grant just-in-time, temporary access to any resource scope. Requesters can explore eligible entitlements and request the access needed for their task. Approvers are notified when approvals await their decision. 
Streamlined workflows facilitated by using PAM can support various use cases, including emergency access for incident responders, time-boxed access for developers for critical deployment or maintenance, temporary access for operators for data ingestion and audits, JIT access to service accounts for automated tasks, and more.", - "api_id": "privilegedaccessmanager.googleapis.com", - "api_shortname": "privilegedaccessmanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-privilegedaccessmanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-privilegedaccessmanager", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-privilegedaccessmanager", - "name_pretty": "Privileged Access Manager API", - "product_documentation": "https://cloud.google.com/iam/docs/pam-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Privileged Access Manager (PAM) helps you on your journey towards least privilege and helps mitigate risks tied to privileged access misuse or abuse. PAM allows you to shift from always-on standing privileges towards on-demand access with just-in-time, time-bound, and approval-based access elevations. PAM allows IAM administrators to create entitlements that can grant just-in-time, temporary access to any resource scope. Requesters can explore eligible entitlements and request the access needed for their task. Approvers are notified when approvals await their decision. 
Streamlined workflows facilitated by using PAM can support various use cases, including emergency access for incident responders, time-boxed access for developers for critical deployment or maintenance, temporary access for operators for data ingestion and audits, JIT access to service accounts for automated tasks, and more.", + "api_id": "privilegedaccessmanager.googleapis.com", + "api_shortname": "privilegedaccessmanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-privilegedaccessmanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-privilegedaccessmanager", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-privilegedaccessmanager", + "name_pretty": "Privileged Access Manager API", + "product_documentation": "https://cloud.google.com/iam/docs/pam-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 094260af0cf9..1fdcd0fb22d4 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -1,18 +1,16 @@ { - "name": "pubsub", - "name_pretty": "Google Cloud Pub/Sub", - "product_documentation": "https://cloud.google.com/pubsub/docs/", + "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. 
By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications.", + "api_id": "pubsub.googleapis.com", + "api_shortname": "pubsub", "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", + "default_version": "v1", + "distribution_name": "google-cloud-pubsub", "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", - "release_level": "stable", "language": "python", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-pubsub", - "api_id": "pubsub.googleapis.com", - "requires_billing": true, - "default_version": "v1", - "codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/pubsub-team", - "api_shortname": "pubsub", "library_type": "GAPIC_COMBO", - "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." -} + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index c14ec14b8619..83e709dba435 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -26,7 +26,7 @@ independently written applications. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs/ -.. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/summary_overview .. _Product Documentation: https://cloud.google.com/pubsub/docs/ Quick Start diff --git a/packages/google-cloud-pubsub/docs/README.rst b/packages/google-cloud-pubsub/docs/README.rst index c14ec14b8619..83e709dba435 100644 --- a/packages/google-cloud-pubsub/docs/README.rst +++ b/packages/google-cloud-pubsub/docs/README.rst @@ -26,7 +26,7 @@ independently written applications. .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs/ -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/summary_overview .. _Product Documentation: https://cloud.google.com/pubsub/docs/ Quick Start diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index fe91a42ff00e..367c5af20630 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -35,3 +35,8 @@ For a list of all ``google-cloud-pubsub`` releases: :hidden: summary_overview.md + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 51dc1fdd13aa..29f878501282 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -13,9 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import json import functools -import grpc +import json import logging as std_logging import os import re @@ -37,6 +36,7 @@ ) import google.protobuf +import grpc from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index b658add6b91c..d7dd429c438d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -13,9 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import json import functools -import grpc +import json import logging as std_logging import os import re @@ -37,6 +36,7 @@ ) import google.protobuf +import grpc from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index c63c9ecdbec6..0fdb8b35f0b9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -14,8 +14,8 @@ # limitations under the License. 
# import logging as std_logging -import warnings import re +import warnings from collections import OrderedDict from typing import ( AsyncIterable, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 774ff43ef01b..3a7c05fc3653 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -13,12 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import json import functools -import grpc +import json import logging as std_logging -import re -import warnings import os import re import warnings @@ -41,6 +38,7 @@ ) import google.protobuf +import grpc from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index ca4ee51c39f1..7d95585992d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1377,7 +1377,7 @@ class AIInference(proto.Message): that specifies this field must have ``iam.serviceAccounts.actAs`` permission on the service account. If not specified, the Pub/Sub `service - agent <{$universe.dns_names.final_documentation_domain}/iam/docs/service-agents>`__, + agent `__, service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, is used. 
""" diff --git a/packages/google-cloud-quotas/.repo-metadata.json b/packages/google-cloud-quotas/.repo-metadata.json index 58cac1840b58..1576d3bb9c50 100644 --- a/packages/google-cloud-quotas/.repo-metadata.json +++ b/packages/google-cloud-quotas/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Cloud Quotas API provides Google Cloud service consumers with management and observability for resource usage, quotas, and restrictions of the services they consume.", - "api_id": "cloudquotas.googleapis.com", - "api_shortname": "cloudquotas", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudquotas/latest", - "default_version": "v1", - "distribution_name": "google-cloud-quotas", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=445904", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-cloudquotas", - "name_pretty": "Cloud Quotas API", - "product_documentation": "https://cloud.google.com/docs/quota/api-overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud Quotas API provides Google Cloud service consumers with management and observability for resource usage, quotas, and restrictions of the services they consume.", + "api_id": "cloudquotas.googleapis.com", + "api_shortname": "cloudquotas", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-cloudquotas/latest", + "default_version": "v1", + "distribution_name": "google-cloud-quotas", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=445904", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-cloudquotas", + "name_pretty": "Cloud Quotas API", + "product_documentation": "https://cloud.google.com/docs/quota/api-overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git 
a/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json b/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json index 95bc087e96e3..f8625f2cedf9 100644 --- a/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json +++ b/packages/google-cloud-rapidmigrationassessment/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Rapid Migration Assessment service is our first-party migration assessment and planning tool.", - "api_id": "rapidmigrationassessment.googleapis.com", - "api_shortname": "rapidmigrationassessment", - "client_documentation": "https://cloud.google.com/python/docs/reference/rapidmigrationassessment/latest", - "default_version": "v1", - "distribution_name": "google-cloud-rapidmigrationassessment", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "rapidmigrationassessment", - "name_pretty": "Rapid Migration Assessment API", - "product_documentation": "https://cloud.google.com/migration-center/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Rapid Migration Assessment service is our first-party migration assessment and planning tool.", + "api_id": "rapidmigrationassessment.googleapis.com", + "api_shortname": "rapidmigrationassessment", + "client_documentation": "https://cloud.google.com/python/docs/reference/rapidmigrationassessment/latest", + "default_version": "v1", + "distribution_name": "google-cloud-rapidmigrationassessment", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "rapidmigrationassessment", + "name_pretty": "Rapid Migration Assessment API", + "product_documentation": "https://cloud.google.com/migration-center/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git 
a/packages/google-cloud-recaptcha-enterprise/.repo-metadata.json b/packages/google-cloud-recaptcha-enterprise/.repo-metadata.json index 3ef9f607b895..f1c0516082e0 100644 --- a/packages/google-cloud-recaptcha-enterprise/.repo-metadata.json +++ b/packages/google-cloud-recaptcha-enterprise/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "protect your website from fraudulent activity like scraping, credential stuffing, and automated account creation.", - "api_id": "recaptchaenterprise.googleapis.com", - "api_shortname": "recaptchaenterprise", - "client_documentation": "https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest", - "default_version": "v1", - "distribution_name": "google-cloud-recaptcha-enterprise", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "recaptchaenterprise", - "name_pretty": "reCAPTCHA Enterprise", - "product_documentation": "https://cloud.google.com/recaptcha-enterprise", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "protect your website from fraudulent activity like scraping, credential stuffing, and automated account creation.", + "api_id": "recaptchaenterprise.googleapis.com", + "api_shortname": "recaptchaenterprise", + "client_documentation": "https://cloud.google.com/python/docs/reference/recaptchaenterprise/latest", + "default_version": "v1", + "distribution_name": "google-cloud-recaptcha-enterprise", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "recaptchaenterprise", + "name_pretty": "reCAPTCHA Enterprise", + "product_documentation": "https://cloud.google.com/recaptcha-enterprise", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-recommendations-ai/.repo-metadata.json b/packages/google-cloud-recommendations-ai/.repo-metadata.json index 18353dad9bec..5dec3a1503c2 100644 --- 
a/packages/google-cloud-recommendations-ai/.repo-metadata.json +++ b/packages/google-cloud-recommendations-ai/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "delivers highly personalized product recommendations at scale.", - "api_id": "recommendationengine.googleapis.com", - "api_shortname": "recommendationengine", - "client_documentation": "https://cloud.google.com/python/docs/reference/recommendationengine/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-recommendations-ai", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "recommendationengine", - "name_pretty": "Recommendations AI", - "product_documentation": "https://cloud.google.com/recommendations-ai/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "delivers highly personalized product recommendations at scale.", + "api_id": "recommendationengine.googleapis.com", + "api_shortname": "recommendationengine", + "client_documentation": "https://cloud.google.com/python/docs/reference/recommendationengine/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-recommendations-ai", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "recommendationengine", + "name_pretty": "Recommendations AI", + "product_documentation": "https://cloud.google.com/recommendations-ai/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-recommender/.repo-metadata.json b/packages/google-cloud-recommender/.repo-metadata.json index f135cb2f1cb5..37b67e072ae6 100644 --- a/packages/google-cloud-recommender/.repo-metadata.json +++ b/packages/google-cloud-recommender/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "delivers highly personalized product recommendations at scale.", - "api_id": "recommender.googleapis.com", - "api_shortname": "recommender", - "client_documentation": 
"https://cloud.google.com/python/docs/reference/recommender/latest", - "default_version": "v1", - "distribution_name": "google-cloud-recommender", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "recommender", - "name_pretty": "Cloud Recommender", - "product_documentation": "https://cloud.google.com/recommender", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "delivers highly personalized product recommendations at scale.", + "api_id": "recommender.googleapis.com", + "api_shortname": "recommender", + "client_documentation": "https://cloud.google.com/python/docs/reference/recommender/latest", + "default_version": "v1", + "distribution_name": "google-cloud-recommender", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "recommender", + "name_pretty": "Cloud Recommender", + "product_documentation": "https://cloud.google.com/recommender", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-redis-cluster/.repo-metadata.json b/packages/google-cloud-redis-cluster/.repo-metadata.json index 3d618a6b1af4..e82db89ac1c8 100644 --- a/packages/google-cloud-redis-cluster/.repo-metadata.json +++ b/packages/google-cloud-redis-cluster/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Creates and manages Redis instances on the Google Cloud Platform.", - "api_id": "cluster.googleapis.com", - "api_shortname": "cluster", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-redis-cluster/latest", - "default_version": "v1", - "distribution_name": "google-cloud-redis-cluster", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1288776&template=1161103", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-redis-cluster", - "name_pretty": "Google Cloud Memorystore for Redis API", - 
"product_documentation": "https://cloud.google.com/redis/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Creates and manages Redis instances on the Google Cloud Platform.", + "api_id": "cluster.googleapis.com", + "api_shortname": "cluster", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-redis-cluster/latest", + "default_version": "v1", + "distribution_name": "google-cloud-redis-cluster", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1288776\u0026template=1161103", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-redis-cluster", + "name_pretty": "Google Cloud Memorystore for Redis API", + "product_documentation": "https://cloud.google.com/redis/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-redis/.repo-metadata.json b/packages/google-cloud-redis/.repo-metadata.json index d350220de5ef..ff51d55be847 100644 --- a/packages/google-cloud-redis/.repo-metadata.json +++ b/packages/google-cloud-redis/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is a fully managed Redis service for the Google Cloud. 
Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Redis service without the burden of managing complex Redis deployments.", - "api_id": "redis.googleapis.com", - "api_shortname": "redis", - "client_documentation": "https://cloud.google.com/python/docs/reference/redis/latest", - "default_version": "v1", - "distribution_name": "google-cloud-redis", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5169231", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "redis", - "name_pretty": "Cloud Redis", - "product_documentation": "https://cloud.google.com/memorystore/docs/redis/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is a fully managed Redis service for the Google Cloud. Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Redis service without the burden of managing complex Redis deployments.", + "api_id": "redis.googleapis.com", + "api_shortname": "redis", + "client_documentation": "https://cloud.google.com/python/docs/reference/redis/latest", + "default_version": "v1", + "distribution_name": "google-cloud-redis", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5169231", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "redis", + "name_pretty": "Cloud Redis", + "product_documentation": "https://cloud.google.com/memorystore/docs/redis/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-resource-manager/.repo-metadata.json b/packages/google-cloud-resource-manager/.repo-metadata.json index 0a053838f07e..42fd6e7a8ee2 100644 --- a/packages/google-cloud-resource-manager/.repo-metadata.json +++ b/packages/google-cloud-resource-manager/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "provides 
methods that you can use to programmatically manage your projects in the Google Cloud Platform.", - "api_id": "cloudresourcemanager.googleapis.com", - "api_shortname": "cloudresourcemanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudresourcemanager/latest", - "default_version": "v3", - "distribution_name": "google-cloud-resource-manager", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559757", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudresourcemanager", - "name_pretty": "Resource Manager", - "product_documentation": "https://cloud.google.com/resource-manager", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "provides methods that you can use to programmatically manage your projects in the Google Cloud Platform.", + "api_id": "cloudresourcemanager.googleapis.com", + "api_shortname": "cloudresourcemanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudresourcemanager/latest", + "default_version": "v3", + "distribution_name": "google-cloud-resource-manager", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559757", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudresourcemanager", + "name_pretty": "Resource Manager", + "product_documentation": "https://cloud.google.com/resource-manager", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-retail/.repo-metadata.json b/packages/google-cloud-retail/.repo-metadata.json index 8ce418fcf891..05e87c2a0922 100644 --- a/packages/google-cloud-retail/.repo-metadata.json +++ b/packages/google-cloud-retail/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Cloud Retail service enables customers to build end-to-end personalized recommendation systems without requiring a high level of expertise in machine 
learning, recommendation system, or Google Cloud.", - "api_id": "retail.googleapis.com", - "api_shortname": "retail", - "client_documentation": "https://cloud.google.com/python/docs/reference/retail/latest", - "default_version": "v2", - "distribution_name": "google-cloud-retail", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "retail", - "name_pretty": "Retail", - "product_documentation": "https://cloud.google.com/retail/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud Retail service enables customers to build end-to-end personalized recommendation systems without requiring a high level of expertise in machine learning, recommendation system, or Google Cloud.", + "api_id": "retail.googleapis.com", + "api_shortname": "retail", + "client_documentation": "https://cloud.google.com/python/docs/reference/retail/latest", + "default_version": "v2", + "distribution_name": "google-cloud-retail", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "retail", + "name_pretty": "Retail", + "product_documentation": "https://cloud.google.com/retail/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-run/.repo-metadata.json b/packages/google-cloud-run/.repo-metadata.json index a6973db16fed..6a4defc41a51 100644 --- a/packages/google-cloud-run/.repo-metadata.json +++ b/packages/google-cloud-run/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a managed compute platform that enables you to run containers that are invocable via requests or events.", - "api_id": "run.googleapis.com", - "api_shortname": "run", - "client_documentation": "https://cloud.google.com/python/docs/reference/run/latest", - "default_version": "v2", - "distribution_name": "google-cloud-run", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "run", - 
"name_pretty": "Cloud Run", - "product_documentation": "https://cloud.google.com/run/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "is a managed compute platform that enables you to run containers that are invocable via requests or events.", + "api_id": "run.googleapis.com", + "api_shortname": "run", + "client_documentation": "https://cloud.google.com/python/docs/reference/run/latest", + "default_version": "v2", + "distribution_name": "google-cloud-run", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "run", + "name_pretty": "Cloud Run", + "product_documentation": "https://cloud.google.com/run/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-runtimeconfig/.repo-metadata.json b/packages/google-cloud-runtimeconfig/.repo-metadata.json index 1bd04a41bf6b..7d491d04e02e 100644 --- a/packages/google-cloud-runtimeconfig/.repo-metadata.json +++ b/packages/google-cloud-runtimeconfig/.repo-metadata.json @@ -1,17 +1,14 @@ { - "name": "runtimeconfig", - "name_pretty": "Google Cloud Runtime Configurator", - "product_documentation": "https://cloud.google.com/deployment-manager/runtime-configurator/", + "api_id": "runtimeconfig.googleapis.com", + "api_shortname": "runtimeconfig", "client_documentation": "https://cloud.google.com/python/docs/reference/runtimeconfig/latest", + "distribution_name": "google-cloud-runtimeconfig", "issue_tracker": "https://issuetracker.google.com/savedsearches/559663", - "release_level": "preview", "language": "python", "library_type": "GAPIC_MANUAL", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-runtimeconfig", - "api_id": "runtimeconfig.googleapis.com", - "requires_billing": true, - "default_version": "", - "codeowner_team": "", - "api_shortname": "runtimeconfig" -} + "name": "runtimeconfig", + "name_pretty": "Google Cloud Runtime Configurator", + 
"product_documentation": "https://cloud.google.com/deployment-manager/runtime-configurator/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-runtimeconfig/docs/README.rst b/packages/google-cloud-runtimeconfig/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-cloud-runtimeconfig/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-cloud-runtimeconfig/docs/README.rst b/packages/google-cloud-runtimeconfig/docs/README.rst new file mode 100644 index 000000000000..46a224c8fab0 --- /dev/null +++ b/packages/google-cloud-runtimeconfig/docs/README.rst @@ -0,0 +1,189 @@ +Python Client for Google Cloud Runtime Configurator API +======================================================= + +|preview| |pypi| |versions| + +`Google Cloud Runtime Configurator API`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-runtimeconfig.svg + :target: https://pypi.org/project/google-cloud-runtimeconfig/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-runtimeconfig.svg + :target: https://pypi.org/project/google-cloud-runtimeconfig/ +.. _Google Cloud Runtime Configurator API: https://cloud.google.com/deployment-manager/runtime-configurator/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/runtimeconfig/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/deployment-manager/runtime-configurator/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. 
`Enable billing for your project.`_ +3. `Enable the Google Cloud Runtime Configurator API.`_ +4. `Set up Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Runtime Configurator API.: https://cloud.google.com/deployment-manager/runtime-configurator/ +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.9 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.8 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-runtimeconfig + + +Windows +^^^^^^^ + +.. 
code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-runtimeconfig + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Cloud Runtime Configurator API + to see other available methods on the client. +- Read the `Google Cloud Runtime Configurator API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Cloud Runtime Configurator API Product documentation: https://cloud.google.com/deployment-manager/runtime-configurator/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. 
+ +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + +Environment-Based Examples +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + +Code-Based Examples +^^^^^^^^^^^^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud import library_v1 + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud import library_v1 + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. 
If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/packages/google-cloud-runtimeconfig/docs/conf.py b/packages/google-cloud-runtimeconfig/docs/conf.py index 1baed3c8bf81..8907563e4868 100644 --- a/packages/google-cloud-runtimeconfig/docs/conf.py +++ b/packages/google-cloud-runtimeconfig/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the diff --git a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/_http.py b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/_http.py index 44e8d8455cc1..acd7188e31a5 100644 --- a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/_http.py +++ b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/_http.py @@ -17,7 +17,6 @@ from google.cloud import _http - from google.cloud.runtimeconfig import __version__ diff --git a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/config.py b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/config.py index 3e02b6e6cce3..d48338818f0f 100644 --- a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/config.py +++ b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/config.py @@ -16,6 +16,7 @@ from google.api_core import page_iterator from google.cloud.exceptions import NotFound + from google.cloud.runtimeconfig._helpers import config_name_from_full_name from google.cloud.runtimeconfig.variable import Variable diff --git a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/variable.py b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/variable.py index 11f17ccb8b8e..4aaad8b31249 100644 --- a/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/variable.py +++ b/packages/google-cloud-runtimeconfig/google/cloud/runtimeconfig/variable.py @@ -40,10 +40,10 @@ from google.api_core import datetime_helpers from google.cloud.exceptions import Conflict, NotFound + from google.cloud.runtimeconfig._helpers import variable_name_from_full_name from google.cloud.runtimeconfig.exceptions import Error - STATE_UNSPECIFIED = "VARIABLE_STATE_UNSPECIFIED" STATE_UPDATED = "UPDATED" STATE_DELETED = "DELETED" diff --git a/packages/google-cloud-runtimeconfig/setup.py b/packages/google-cloud-runtimeconfig/setup.py index 988bf437ec7e..11625b0de86f 100644 --- 
a/packages/google-cloud-runtimeconfig/setup.py +++ b/packages/google-cloud-runtimeconfig/setup.py @@ -17,7 +17,6 @@ import setuptools - # Package metadata. name = "google-cloud-runtimeconfig" diff --git a/packages/google-cloud-runtimeconfig/tests/unit/test__http.py b/packages/google-cloud-runtimeconfig/tests/unit/test__http.py index 39d34832aabd..c5842915e7f2 100644 --- a/packages/google-cloud-runtimeconfig/tests/unit/test__http.py +++ b/packages/google-cloud-runtimeconfig/tests/unit/test__http.py @@ -33,8 +33,7 @@ def test_default_url(self): self.assertIs(conn._client, client) def test_build_api_url_no_extra_query_params(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit conn = self._make_one(object()) uri = conn.build_api_url("/foo") @@ -47,8 +46,7 @@ def test_build_api_url_no_extra_query_params(self): self.assertEqual(parms, {}) def test_build_api_url_w_custom_endpoint(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit custom_endpoint = "https://foo-runtimeconfig.googleapis.com" conn = self._make_one(object(), api_endpoint=custom_endpoint) @@ -62,8 +60,7 @@ def test_build_api_url_w_custom_endpoint(self): self.assertEqual(parms, {}) def test_build_api_url_w_extra_query_params(self): - from urllib.parse import parse_qsl - from urllib.parse import urlsplit + from urllib.parse import parse_qsl, urlsplit conn = self._make_one(object()) uri = conn.build_api_url("/foo", {"bar": "baz"}) @@ -75,6 +72,7 @@ def test_build_api_url_w_extra_query_params(self): def test_extra_headers(self): import requests + from google.cloud import _http as base_http http = mock.create_autospec(requests.Session, instance=True) diff --git a/packages/google-cloud-runtimeconfig/tests/unit/test_client.py b/packages/google-cloud-runtimeconfig/tests/unit/test_client.py index a0fef69c4010..2d1e0f69a30f 100644 --- 
a/packages/google-cloud-runtimeconfig/tests/unit/test_client.py +++ b/packages/google-cloud-runtimeconfig/tests/unit/test_client.py @@ -35,6 +35,7 @@ def _make_one(self, *args, **kw): def test_ctor_wo_client_info(self): from google.cloud._http import ClientInfo + from google.cloud.runtimeconfig._http import Connection project = "PROJECT" @@ -49,6 +50,7 @@ def test_ctor_wo_client_info(self): def test_ctor_w_client_info(self): from google.cloud._http import ClientInfo + from google.cloud.runtimeconfig._http import Connection project = "PROJECT" diff --git a/packages/google-cloud-runtimeconfig/tests/unit/test_config.py b/packages/google-cloud-runtimeconfig/tests/unit/test_config.py index 71ff480d706c..d17eb3b32dca 100644 --- a/packages/google-cloud-runtimeconfig/tests/unit/test_config.py +++ b/packages/google-cloud-runtimeconfig/tests/unit/test_config.py @@ -231,6 +231,7 @@ def test_list_variables_empty(self): def test_list_variables_defaults(self): from google.cloud._helpers import _rfc3339_to_datetime + from google.cloud.runtimeconfig.variable import Variable VARIABLE_1 = "variable-one" @@ -277,6 +278,7 @@ def test_list_variables_defaults(self): def test_list_variables_explicit(self): from google.cloud._helpers import _rfc3339_to_datetime + from google.cloud.runtimeconfig.variable import Variable VARIABLE_1 = "variable-one" diff --git a/packages/google-cloud-runtimeconfig/tests/unit/test_variable.py b/packages/google-cloud-runtimeconfig/tests/unit/test_variable.py index 063b04281432..07f459395a4d 100644 --- a/packages/google-cloud-runtimeconfig/tests/unit/test_variable.py +++ b/packages/google-cloud-runtimeconfig/tests/unit/test_variable.py @@ -32,6 +32,7 @@ def _make_one(self, *args, **kw): def _verifyResourceProperties(self, variable, resource): import base64 + from google.api_core import datetime_helpers if "name" in resource: @@ -131,6 +132,7 @@ def test_create_no_data(self): def test_create_conflict(self): from google.cloud.exceptions import Conflict + from 
google.cloud.runtimeconfig.config import Config conn = _Connection(Conflict("test")) diff --git a/packages/google-cloud-saasplatform-saasservicemgmt/.repo-metadata.json b/packages/google-cloud-saasplatform-saasservicemgmt/.repo-metadata.json index 3c5759220f6c..d5e8cb4e7d31 100644 --- a/packages/google-cloud-saasplatform-saasservicemgmt/.repo-metadata.json +++ b/packages/google-cloud-saasplatform-saasservicemgmt/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "SaaS Runtime lets you store, host, manage, and monitor software as a service (SaaS) applications on Google Cloud.", - "api_id": "saasservicemgmt.googleapis.com", - "api_shortname": "saasservicemgmt", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-saasplatform-saasservicemgmt/latest", - "default_version": "v1beta1", - "distribution_name": "google-cloud-saasplatform-saasservicemgmt", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-saasplatform-saasservicemgmt", - "name_pretty": "SaaS Runtime API", - "product_documentation": "https://cloud.google.com/saas-runtime/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "SaaS Runtime lets you store, host, manage, and monitor software as a service (SaaS) applications on Google Cloud.", + "api_id": "saasservicemgmt.googleapis.com", + "api_shortname": "saasservicemgmt", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-saasplatform-saasservicemgmt/latest", + "default_version": "v1beta1", + "distribution_name": "google-cloud-saasplatform-saasservicemgmt", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-saasplatform-saasservicemgmt", + "name_pretty": "SaaS Runtime API", + "product_documentation": 
"https://cloud.google.com/saas-runtime/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-scheduler/.repo-metadata.json b/packages/google-cloud-scheduler/.repo-metadata.json index c8e70aea1c68..be234c522836 100644 --- a/packages/google-cloud-scheduler/.repo-metadata.json +++ b/packages/google-cloud-scheduler/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "lets you set up scheduled units of work to be executed at defined times or regular intervals. These work units are commonly known as cron jobs. Typical use cases might include sending out a report email on a daily basis, updating some cached data every 10 minutes, or updating some summary information once an hour.", - "api_id": "cloudscheduler.googleapis.com", - "api_shortname": "cloudscheduler", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudscheduler/latest", - "default_version": "v1", - "distribution_name": "google-cloud-scheduler", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5411429", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudscheduler", - "name_pretty": "Cloud Scheduler", - "product_documentation": "https://cloud.google.com/scheduler/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "lets you set up scheduled units of work to be executed at defined times or regular intervals. These work units are commonly known as cron jobs. 
Typical use cases might include sending out a report email on a daily basis, updating some cached data every 10 minutes, or updating some summary information once an hour.", + "api_id": "cloudscheduler.googleapis.com", + "api_shortname": "cloudscheduler", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudscheduler/latest", + "default_version": "v1", + "distribution_name": "google-cloud-scheduler", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5411429", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudscheduler", + "name_pretty": "Cloud Scheduler", + "product_documentation": "https://cloud.google.com/scheduler/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-secret-manager/.repo-metadata.json b/packages/google-cloud-secret-manager/.repo-metadata.json index 9d480db78bdc..4fea18035351 100644 --- a/packages/google-cloud-secret-manager/.repo-metadata.json +++ b/packages/google-cloud-secret-manager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Stores, manages, and secures access to application secrets.", - "api_id": "secretmanager.googleapis.com", - "api_shortname": "secretmanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/secretmanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-secret-manager", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "secretmanager", - "name_pretty": "Secret Manager", - "product_documentation": "https://cloud.google.com/secret-manager/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Stores, manages, and secures access to application secrets.", + "api_id": "secretmanager.googleapis.com", + "api_shortname": "secretmanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/secretmanager/latest", 
+ "default_version": "v1", + "distribution_name": "google-cloud-secret-manager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=784854\u0026template=1380926", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "secretmanager", + "name_pretty": "Secret Manager", + "product_documentation": "https://cloud.google.com/secret-manager/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-securesourcemanager/.repo-metadata.json b/packages/google-cloud-securesourcemanager/.repo-metadata.json index 2ff4204edac0..e5cbc672029c 100644 --- a/packages/google-cloud-securesourcemanager/.repo-metadata.json +++ b/packages/google-cloud-securesourcemanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Regionally deployed, single-tenant managed source code repository hosted on Google Cloud.", - "api_id": "securesourcemanager.googleapis.com", - "api_shortname": "securesourcemanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/securesourcemanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-securesourcemanager", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "securesourcemanager", - "name_pretty": "Secure Source Manager API", - "product_documentation": "https://cloud.google.com/secure-source-manager/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Regionally deployed, single-tenant managed source code repository hosted on Google Cloud.", + "api_id": "securesourcemanager.googleapis.com", + "api_shortname": "securesourcemanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/securesourcemanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-securesourcemanager", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "securesourcemanager", + "name_pretty": "Secure Source Manager API", + "product_documentation": "https://cloud.google.com/secure-source-manager/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-security-publicca/.repo-metadata.json b/packages/google-cloud-security-publicca/.repo-metadata.json index ad4a106b5e3e..8a4c6c398f90 100644 --- a/packages/google-cloud-security-publicca/.repo-metadata.json +++ b/packages/google-cloud-security-publicca/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "simplifies the deployment and management of public CAs without managing infrastructure.", - "api_id": "publicca.googleapis.com", - "api_shortname": "publicca", - "client_documentation": "https://cloud.google.com/python/docs/reference/publicca/latest", - "default_version": "v1", - "distribution_name": "google-cloud-security-publicca", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "publicca", - "name_pretty": "Public Certificate Authority", - "product_documentation": "https://cloud.google.com/certificate-manager/docs/public-ca", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "simplifies the deployment and management of public CAs without managing infrastructure.", + "api_id": "publicca.googleapis.com", + "api_shortname": "publicca", + "client_documentation": "https://cloud.google.com/python/docs/reference/publicca/latest", + "default_version": "v1", + "distribution_name": "google-cloud-security-publicca", + "issue_tracker": "https://cloud.google.com/certificate-manager/docs/getting-support", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "publicca", + "name_pretty": "Public Certificate Authority", + "product_documentation": 
"https://cloud.google.com/certificate-manager/docs/public-ca", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-securitycenter/.repo-metadata.json b/packages/google-cloud-securitycenter/.repo-metadata.json index f0294e90b0af..026da2943976 100644 --- a/packages/google-cloud-securitycenter/.repo-metadata.json +++ b/packages/google-cloud-securitycenter/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "makes it easier for you to prevent, detect, and respond to threats. Identify security misconfigurations in virtual machines, networks, applications, and storage buckets from a centralized dashboard. Take action on them before they can potentially result in business damage or loss. Built-in capabilities can quickly surface suspicious activity in your Stackdriver security logs or indicate compromised virtual machines. Respond to threats by following actionable recommendations or exporting logs to your SIEM for further investigation.", - "api_id": "securitycenter.googleapis.com", - "api_shortname": "securitycenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/securitycenter/latest", - "default_version": "v1", - "distribution_name": "google-cloud-securitycenter", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559748", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "securitycenter", - "name_pretty": "Google Cloud Security Command Center", - "product_documentation": "https://cloud.google.com/security-command-center", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "makes it easier for you to prevent, detect, and respond to threats. Identify security misconfigurations in virtual machines, networks, applications, and storage buckets from a centralized dashboard. 
Take action on them before they can potentially result in business damage or loss. Built-in capabilities can quickly surface suspicious activity in your Stackdriver security logs or indicate compromised virtual machines. Respond to threats by following actionable recommendations or exporting logs to your SIEM for further investigation.", + "api_id": "securitycenter.googleapis.com", + "api_shortname": "securitycenter", + "client_documentation": "https://cloud.google.com/python/docs/reference/securitycenter/latest", + "default_version": "v1", + "distribution_name": "google-cloud-securitycenter", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559748", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "securitycenter", + "name_pretty": "Google Cloud Security Command Center", + "product_documentation": "https://cloud.google.com/security-command-center", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-securitycentermanagement/.repo-metadata.json b/packages/google-cloud-securitycentermanagement/.repo-metadata.json index e23b24b7e16e..38d04533fc8f 100644 --- a/packages/google-cloud-securitycentermanagement/.repo-metadata.json +++ b/packages/google-cloud-securitycentermanagement/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "securitycenter.googleapis.com", - "api_shortname": "securitycenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-securitycentermanagement/latest", - "default_version": "v1", - "distribution_name": "google-cloud-securitycentermanagement", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-securitycentermanagement", - "name_pretty": "Security Center Management API", - "product_documentation": 
"https://cloud.google.com/securitycentermanagement/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Management API for Security Command Center, a built-in security and risk\nmanagement solution for Google Cloud. Use this API to programmatically\nupdate the settings and configuration of Security Command Center.", + "api_id": "securitycenter.googleapis.com", + "api_shortname": "securitycenter", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-securitycentermanagement/latest", + "default_version": "v1", + "distribution_name": "google-cloud-securitycentermanagement", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-securitycentermanagement", + "name_pretty": "Security Center Management API", + "product_documentation": "https://cloud.google.com/securitycentermanagement/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-securitycentermanagement/README.rst b/packages/google-cloud-securitycentermanagement/README.rst index e8654dbd17ff..b5cbca1d6497 100644 --- a/packages/google-cloud-securitycentermanagement/README.rst +++ b/packages/google-cloud-securitycentermanagement/README.rst @@ -3,7 +3,9 @@ Python Client for Security Center Management API |preview| |pypi| |versions| -`Security Center Management API`_: +`Security Center Management API`_: Management API for Security Command Center, a built-in security and risk +management solution for Google Cloud. Use this API to programmatically +update the settings and configuration of Security Command Center. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-securitycentermanagement/docs/README.rst b/packages/google-cloud-securitycentermanagement/docs/README.rst index e8654dbd17ff..b5cbca1d6497 100644 --- a/packages/google-cloud-securitycentermanagement/docs/README.rst +++ b/packages/google-cloud-securitycentermanagement/docs/README.rst @@ -3,7 +3,9 @@ Python Client for Security Center Management API |preview| |pypi| |versions| -`Security Center Management API`_: +`Security Center Management API`_: Management API for Security Command Center, a built-in security and risk +management solution for Google Cloud. Use this API to programmatically +update the settings and configuration of Security Command Center. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-service-control/.repo-metadata.json b/packages/google-cloud-service-control/.repo-metadata.json index e8f723cf1b9e..672cbf34cac1 100644 --- a/packages/google-cloud-service-control/.repo-metadata.json +++ b/packages/google-cloud-service-control/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": " is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. 
It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway.", - "api_id": "servicecontrol.googleapis.com", - "api_shortname": "servicecontrol", - "client_documentation": "https://cloud.google.com/python/docs/reference/servicecontrol/latest", - "default_version": "v1", - "distribution_name": "google-cloud-service-control", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "servicecontrol", - "name_pretty": "Service Control", - "product_documentation": "https://cloud.google.com/service-infrastructure/docs/overview/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": " is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway.", + "api_id": "servicecontrol.googleapis.com", + "api_shortname": "servicecontrol", + "client_documentation": "https://cloud.google.com/python/docs/reference/servicecontrol/latest", + "default_version": "v1", + "distribution_name": "google-cloud-service-control", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "servicecontrol", + "name_pretty": "Service Control", + "product_documentation": "https://cloud.google.com/service-infrastructure/docs/overview/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-service-directory/.repo-metadata.json b/packages/google-cloud-service-directory/.repo-metadata.json index 35db7e849b9a..37698b4deeac 100644 --- a/packages/google-cloud-service-directory/.repo-metadata.json +++ b/packages/google-cloud-service-directory/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Allows the registration and lookup of services.", - "api_id": "servicedirectory.googleapis.com", - "api_shortname": "servicedirectory", - "client_documentation": 
"https://cloud.google.com/python/docs/reference/servicedirectory/latest", - "default_version": "v1", - "distribution_name": "google-cloud-service-directory", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "servicedirectory", - "name_pretty": "Service Directory", - "product_documentation": "https://cloud.google.com/service-directory/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Allows the registration and lookup of services.", + "api_id": "servicedirectory.googleapis.com", + "api_shortname": "servicedirectory", + "client_documentation": "https://cloud.google.com/python/docs/reference/servicedirectory/latest", + "default_version": "v1", + "distribution_name": "google-cloud-service-directory", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "servicedirectory", + "name_pretty": "Service Directory", + "product_documentation": "https://cloud.google.com/service-directory/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-service-management/.repo-metadata.json b/packages/google-cloud-service-management/.repo-metadata.json index b967c15063a6..445194af4a4b 100644 --- a/packages/google-cloud-service-management/.repo-metadata.json +++ b/packages/google-cloud-service-management/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway. 
Service Infrastructure provides a wide range of features to service consumers and service producers, including authentication, authorization, auditing, rate limiting, analytics, billing, logging, and monitoring.", - "api_id": "servicemanagement.googleapis.com", - "api_shortname": "servicemanagement", - "client_documentation": "https://cloud.google.com/python/docs/reference/servicemanagement/latest", - "default_version": "v1", - "distribution_name": "google-cloud-service-management", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "servicemanagement", - "name_pretty": "Service Management", - "product_documentation": "https://cloud.google.com/service-infrastructure/docs/overview/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is a foundational platform for creating, managing, securing, and consuming APIs and services across organizations. It is used by Google APIs, Cloud APIs, Cloud Endpoints, and API Gateway. 
Service Infrastructure provides a wide range of features to service consumers and service producers, including authentication, authorization, auditing, rate limiting, analytics, billing, logging, and monitoring.", + "api_id": "servicemanagement.googleapis.com", + "api_shortname": "servicemanagement", + "client_documentation": "https://cloud.google.com/python/docs/reference/servicemanagement/latest", + "default_version": "v1", + "distribution_name": "google-cloud-service-management", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "servicemanagement", + "name_pretty": "Service Management", + "product_documentation": "https://cloud.google.com/service-infrastructure/docs/overview/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py index da7a5918ccc8..6b376ce3f025 100644 --- a/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py +++ b/packages/google-cloud-service-management/tests/unit/gapic/servicemanagement_v1/test_service_manager.py @@ -10751,6 +10751,7 @@ def test_create_service_config_rest_call_success(request_type): "disable_auth": True, "protocol": "protocol_value", "overrides_by_request_protocol": {}, + "load_balancing_policy": "load_balancing_policy_value", } ] }, @@ -10973,6 +10974,26 @@ def test_create_service_config_rest_call_success(request_type): "auto_populated_fields_value1", "auto_populated_fields_value2", ], + "batching": { + "thresholds": { + "element_count_threshold": 2462, + "request_byte_threshold": 2376, + "delay_threshold": {}, + "element_count_limit": 2032, + "request_byte_limit": 1946, + "flow_control_element_limit": 2783, + "flow_control_byte_limit": 2473, + "flow_control_limit_exceeded_behavior": 1, + }, + 
"batch_descriptor": { + "batched_field": "batched_field_value", + "discriminator_fields": [ + "discriminator_fields_value1", + "discriminator_fields_value2", + ], + "subresponse_field": "subresponse_field_value", + }, + }, } ], "new_issue_uri": "new_issue_uri_value", @@ -11003,7 +11024,10 @@ def test_create_service_config_rest_call_success(request_type): }, }, "cpp_settings": {"common": {}}, - "php_settings": {"common": {}}, + "php_settings": { + "common": {}, + "library_package": "library_package_value", + }, "python_settings": { "common": {}, "experimental_features": { diff --git a/packages/google-cloud-service-usage/.repo-metadata.json b/packages/google-cloud-service-usage/.repo-metadata.json index cf3d52f29698..ad0a5e0cbbe3 100644 --- a/packages/google-cloud-service-usage/.repo-metadata.json +++ b/packages/google-cloud-service-usage/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is an infrastructure service of Google Cloud that lets you list and manage other APIs and services in your Cloud projects.", - "api_id": "serviceusage.googleapis.com", - "api_shortname": "serviceusage", - "client_documentation": "https://cloud.google.com/python/docs/reference/serviceusage/latest", - "default_version": "v1", - "distribution_name": "google-cloud-service-usage", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "serviceusage", - "name_pretty": "Service Usage", - "product_documentation": "https://cloud.google.com/service-usage", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is an infrastructure service of Google Cloud that lets you list and manage other APIs and services in your Cloud projects.", + "api_id": "serviceusage.googleapis.com", + "api_shortname": "serviceusage", + "client_documentation": "https://cloud.google.com/python/docs/reference/serviceusage/latest", + "default_version": "v1", + "distribution_name": "google-cloud-service-usage", + "language": "python", + 
"library_type": "GAPIC_AUTO", + "name": "serviceusage", + "name_pretty": "Service Usage", + "product_documentation": "https://cloud.google.com/service-usage", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-servicehealth/.repo-metadata.json b/packages/google-cloud-servicehealth/.repo-metadata.json index 9ae24fe6d186..3b02c72cdc4a 100644 --- a/packages/google-cloud-servicehealth/.repo-metadata.json +++ b/packages/google-cloud-servicehealth/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Personalized Service Health helps you gain visibility into disruptive events impacting Google Cloud products.", - "api_id": "servicehealth.googleapis.com", - "api_shortname": "servicehealth", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-servicehealth/latest", - "default_version": "v1", - "distribution_name": "google-cloud-servicehealth", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1466723&template=1161103", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-servicehealth", - "name_pretty": "Service Health API", - "product_documentation": "https://cloud.google.com/service-health/docs/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Personalized Service Health helps you gain visibility into disruptive events impacting Google Cloud products.", + "api_id": "servicehealth.googleapis.com", + "api_shortname": "servicehealth", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-servicehealth/latest", + "default_version": "v1", + "distribution_name": "google-cloud-servicehealth", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1466723\u0026template=1161103", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-servicehealth", + "name_pretty": "Service 
Health API", + "product_documentation": "https://cloud.google.com/service-health/docs/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-shell/.repo-metadata.json b/packages/google-cloud-shell/.repo-metadata.json index 4ddb86c94a79..de251db60b60 100644 --- a/packages/google-cloud-shell/.repo-metadata.json +++ b/packages/google-cloud-shell/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "is an interactive shell environment for Google Cloud that makes it easy for you to learn and experiment with Google Cloud and manage your projects and resources from your web browser.", - "api_id": "cloudshell.googleapis.com", - "api_shortname": "cloudshell", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudshell/latest", - "default_version": "v1", - "distribution_name": "google-cloud-shell", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudshell", - "name_pretty": "Cloud Shell", - "product_documentation": "https://cloud.google.com/shell/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "is an interactive shell environment for Google Cloud that makes it easy for you to learn and experiment with Google Cloud and manage your projects and resources from your web browser.", + "api_id": "cloudshell.googleapis.com", + "api_shortname": "cloudshell", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudshell/latest", + "default_version": "v1", + "distribution_name": "google-cloud-shell", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudshell", + "name_pretty": "Cloud Shell", + "product_documentation": "https://cloud.google.com/shell/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-source-context/.repo-metadata.json 
b/packages/google-cloud-source-context/.repo-metadata.json index 53dc346eaf57..50fe10fad697 100644 --- a/packages/google-cloud-source-context/.repo-metadata.json +++ b/packages/google-cloud-source-context/.repo-metadata.json @@ -1,15 +1,15 @@ { - "api_id": "source.googleapis.com", - "api_shortname": "source", - "client_documentation": "https://cloud.google.com/python/docs/reference/source/latest", - "default_version": "v1", - "distribution_name": "google-cloud-source-context", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "OTHER", - "name": "source", - "name_pretty": "Source Context", - "product_documentation": "https://cloud.google.com", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_id": "source.googleapis.com", + "api_shortname": "source", + "client_documentation": "https://cloud.google.com/python/docs/reference/source/latest", + "default_version": "v1", + "distribution_name": "google-cloud-source-context", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "OTHER", + "name": "source", + "name_pretty": "Source Context", + "product_documentation": "https://cloud.google.com", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-spanner/.repo-metadata.json b/packages/google-cloud-spanner/.repo-metadata.json index 9f9f2f0fc02a..ecd29547a09b 100644 --- a/packages/google-cloud-spanner/.repo-metadata.json +++ b/packages/google-cloud-spanner/.repo-metadata.json @@ -1,18 +1,16 @@ { - "name": "spanner", - "name_pretty": "Cloud Spanner", - "product_documentation": "https://cloud.google.com/spanner/docs/", + "api_description": "is the world's first fully managed relational database service \nto offer both strong consistency and horizontal scalability for \nmission-critical online transaction processing (OLTP) 
applications. With Cloud \nSpanner you enjoy all the traditional benefits of a relational database; but \nunlike any other relational database service, Cloud Spanner scales horizontally \nto hundreds or thousands of servers to handle the biggest transactional \nworkloads.", + "api_id": "spanner.googleapis.com", + "api_shortname": "spanner", "client_documentation": "https://cloud.google.com/python/docs/reference/spanner/latest", + "default_version": "v1", + "distribution_name": "google-cloud-spanner", "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open", - "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-spanner", - "api_id": "spanner.googleapis.com", - "requires_billing": true, - "default_version": "v1", - "codeowner_team": "@googleapis/spanner-team", - "api_shortname": "spanner", - "api_description": "is the world's first fully managed relational database service \nto offer both strong consistency and horizontal scalability for \nmission-critical online transaction processing (OLTP) applications. With Cloud \nSpanner you enjoy all the traditional benefits of a relational database; but \nunlike any other relational database service, Cloud Spanner scales horizontally \nto hundreds or thousands of servers to handle the biggest transactional \nworkloads." 
-} + "name": "spanner", + "name_pretty": "Cloud Spanner", + "product_documentation": "https://cloud.google.com/spanner/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-spanner/benchmark/benchwrapper/proto/spanner_pb2.py b/packages/google-cloud-spanner/benchmark/benchwrapper/proto/spanner_pb2.py index e2d9b1a8250a..f922b907fb44 100644 --- a/packages/google-cloud-spanner/benchmark/benchwrapper/proto/spanner_pb2.py +++ b/packages/google-cloud-spanner/benchmark/benchwrapper/proto/spanner_pb2.py @@ -1,4 +1,19 @@ # -*- coding: utf-8 -*- + +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by the protocol buffer compiler. DO NOT EDIT! # source: benchmark/benchwrapper/proto/spanner.proto """Generated protocol buffer code.""" diff --git a/packages/google-cloud-spanner/docs/spanner_v1/batch.rst b/packages/google-cloud-spanner/docs/spanner_v1/batch.rst deleted file mode 100644 index ecd51b01cdd9..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/batch.rst +++ /dev/null @@ -1,6 +0,0 @@ -Batch API -========= - -.. 
automodule:: google.cloud.spanner_v1.batch - :members: - :show-inheritance: diff --git a/packages/google-cloud-spanner/docs/spanner_v1/client.rst b/packages/google-cloud-spanner/docs/spanner_v1/client.rst deleted file mode 100644 index 3cc5a89b2137..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/client.rst +++ /dev/null @@ -1,7 +0,0 @@ -Spanner Client -============== - -.. automodule:: google.cloud.spanner_v1.client - :members: - :show-inheritance: - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/database.rst b/packages/google-cloud-spanner/docs/spanner_v1/database.rst deleted file mode 100644 index f1ce2a6d8e26..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/database.rst +++ /dev/null @@ -1,8 +0,0 @@ -Database API -============ - -.. automodule:: google.cloud.spanner_v1.database - :members: - :show-inheritance: - - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/instance.rst b/packages/google-cloud-spanner/docs/spanner_v1/instance.rst deleted file mode 100644 index 127b4c687372..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/instance.rst +++ /dev/null @@ -1,8 +0,0 @@ -Instance API -============ - -.. automodule:: google.cloud.spanner_v1.instance - :members: - :show-inheritance: - - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/keyset.rst b/packages/google-cloud-spanner/docs/spanner_v1/keyset.rst deleted file mode 100644 index 90137cf87640..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/keyset.rst +++ /dev/null @@ -1,8 +0,0 @@ -Keyset API -========== - -.. automodule:: google.cloud.spanner_v1.keyset - :members: - :show-inheritance: - - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/session.rst b/packages/google-cloud-spanner/docs/spanner_v1/session.rst deleted file mode 100644 index 1f6d0ac60261..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/session.rst +++ /dev/null @@ -1,15 +0,0 @@ -Session API -=========== - -.. 
automodule:: google.cloud.spanner_v1.session - :members: - :show-inheritance: - - -Session Pools API -================= - -.. automodule:: google.cloud.spanner_v1.pool - :members: - :show-inheritance: - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/snapshot.rst b/packages/google-cloud-spanner/docs/spanner_v1/snapshot.rst deleted file mode 100644 index ca37d8a2591a..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/snapshot.rst +++ /dev/null @@ -1,8 +0,0 @@ -Snapshot API -============ - -.. automodule:: google.cloud.spanner_v1.snapshot - :members: - :inherited-members: - - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/streamed.rst b/packages/google-cloud-spanner/docs/spanner_v1/streamed.rst deleted file mode 100644 index 53bab89ba491..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/streamed.rst +++ /dev/null @@ -1,8 +0,0 @@ -StreamedResultSet API -===================== - -.. automodule:: google.cloud.spanner_v1.streamed - :members: - :show-inheritance: - - diff --git a/packages/google-cloud-spanner/docs/spanner_v1/table.rst b/packages/google-cloud-spanner/docs/spanner_v1/table.rst deleted file mode 100644 index 86b81dc86ea0..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/table.rst +++ /dev/null @@ -1,6 +0,0 @@ -Table API -========= - -.. automodule:: google.cloud.spanner_v1.table - :members: - :show-inheritance: diff --git a/packages/google-cloud-spanner/docs/spanner_v1/transaction.rst b/packages/google-cloud-spanner/docs/spanner_v1/transaction.rst deleted file mode 100644 index f7e8d4759aa9..000000000000 --- a/packages/google-cloud-spanner/docs/spanner_v1/transaction.rst +++ /dev/null @@ -1,8 +0,0 @@ -Transaction API -=============== - -.. 
automodule:: google.cloud.spanner_v1.transaction - :members: - :inherited-members: - - diff --git a/packages/google-cloud-spanner/samples/samples/archived/backup_snippet.py b/packages/google-cloud-spanner/samples/samples/archived/backup_snippet.py index f31cbc1f2c48..bdee010d5051 100644 --- a/packages/google-cloud-spanner/samples/samples/archived/backup_snippet.py +++ b/packages/google-cloud-spanner/samples/samples/archived/backup_snippet.py @@ -127,8 +127,7 @@ def create_backup_with_encryption_key( instance_id, database_id, backup_id, kms_key_name ): """Creates a backup for a database using a Customer Managed Encryption Key (CMEK).""" - from google.cloud.spanner_admin_database_v1 import \ - CreateBackupEncryptionConfig + from google.cloud.spanner_admin_database_v1 import CreateBackupEncryptionConfig spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) @@ -187,8 +186,7 @@ def create_database_with_version_retention_period( + " AlbumTitle STRING(MAX)" + ") PRIMARY KEY (SingerId, AlbumId)," + " INTERLEAVE IN PARENT Singers ON DELETE CASCADE", - "ALTER DATABASE `{}`" - " SET OPTIONS (version_retention_period = '{}')".format( + "ALTER DATABASE `{}` SET OPTIONS (version_retention_period = '{}')".format( database_id, retention_period ), ] @@ -396,8 +394,7 @@ def restore_database_with_encryption_key( instance_id, new_database_id, backup_id, kms_key_name ): """Restores a database from a backup using a Customer Managed Encryption Key (CMEK).""" - from google.cloud.spanner_admin_database_v1 import \ - RestoreDatabaseEncryptionConfig + from google.cloud.spanner_admin_database_v1 import RestoreDatabaseEncryptionConfig spanner_client = spanner.Client() instance = spanner_client.instance(instance_id) diff --git a/packages/google-cloud-spanner/samples/samples/archived/backup_snippet_test.py b/packages/google-cloud-spanner/samples/samples/archived/backup_snippet_test.py index 888124ffad5d..e1c5251145c9 100644 --- 
a/packages/google-cloud-spanner/samples/samples/archived/backup_snippet_test.py +++ b/packages/google-cloud-spanner/samples/samples/archived/backup_snippet_test.py @@ -91,8 +91,10 @@ def test_create_backup_with_encryption_key( assert kms_key_name in out -@pytest.mark.skip(reason="same test passes on unarchived test suite, " - "but fails here. Needs investigation") +@pytest.mark.skip( + reason="same test passes on unarchived test suite, " + "but fails here. Needs investigation" +) @pytest.mark.dependency(depends=["create_backup"]) @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database(capsys, instance_id, sample_database): @@ -103,8 +105,10 @@ def test_restore_database(capsys, instance_id, sample_database): assert BACKUP_ID in out -@pytest.mark.skip(reason="same test passes on unarchived test suite, " - "but fails here. Needs investigation") +@pytest.mark.skip( + reason="same test passes on unarchived test suite, " + "but fails here. Needs investigation" +) @pytest.mark.dependency(depends=["create_backup_with_encryption_key"]) @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database_with_encryption_key( diff --git a/packages/google-cloud-spanner/samples/samples/archived/pg_samples.py b/packages/google-cloud-spanner/samples/samples/archived/pg_samples.py index 2d0dd0e5a983..2e66cbb7735c 100644 --- a/packages/google-cloud-spanner/samples/samples/archived/pg_samples.py +++ b/packages/google-cloud-spanner/samples/samples/archived/pg_samples.py @@ -18,6 +18,7 @@ Spanner PostgreSql dialect. For more information, see the README.rst under /spanner. 
""" + from google.cloud import spanner, spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect diff --git a/packages/google-cloud-spanner/samples/samples/archived/pg_samples_test.py b/packages/google-cloud-spanner/samples/samples/archived/pg_samples_test.py index 3863f5aa56a6..f64af67e0c34 100644 --- a/packages/google-cloud-spanner/samples/samples/archived/pg_samples_test.py +++ b/packages/google-cloud-spanner/samples/samples/archived/pg_samples_test.py @@ -17,9 +17,10 @@ import pg_samples as samples import pytest from google.api_core import exceptions -from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from test_utils.retry import RetryErrors +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect + CREATE_TABLE_SINGERS = """\ CREATE TABLE Singers ( SingerId BIGINT NOT NULL, diff --git a/packages/google-cloud-spanner/samples/samples/archived/samples.py b/packages/google-cloud-spanner/samples/samples/archived/samples.py index 0f930d4a356f..619c6ace65c9 100644 --- a/packages/google-cloud-spanner/samples/samples/archived/samples.py +++ b/packages/google-cloud-spanner/samples/samples/archived/samples.py @@ -22,10 +22,11 @@ import time -from google.cloud import spanner from google.iam.v1 import policy_pb2 from google.type import expr_pb2 +from google.cloud import spanner + OPERATION_TIMEOUT_SECONDS = 240 @@ -293,8 +294,9 @@ def create_database_with_default_leader(instance_id, database_id, default_leader AlbumTitle STRING(MAX) ) PRIMARY KEY (SingerId, AlbumId), INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", - "ALTER DATABASE {}" - " SET OPTIONS (default_leader = '{}')".format(database_id, default_leader), + "ALTER DATABASE {} SET OPTIONS (default_leader = '{}')".format( + database_id, default_leader + ), ], ) operation = database.create() @@ -806,8 +808,9 @@ def update_database_with_default_leader(instance_id, database_id, default_leader operation = 
database.update_ddl( [ - "ALTER DATABASE {}" - " SET OPTIONS (default_leader = '{}')".format(database_id, default_leader) + "ALTER DATABASE {} SET OPTIONS (default_leader = '{}')".format( + database_id, default_leader + ) ] ) operation.result(OPERATION_TIMEOUT_SECONDS) diff --git a/packages/google-cloud-spanner/samples/samples/archived/samples_test.py b/packages/google-cloud-spanner/samples/samples/archived/samples_test.py index 6435dc531101..9a1d5063f76c 100644 --- a/packages/google-cloud-spanner/samples/samples/archived/samples_test.py +++ b/packages/google-cloud-spanner/samples/samples/archived/samples_test.py @@ -23,11 +23,11 @@ import pytest from google.api_core import exceptions -from google.cloud import spanner -from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect from test_utils.retry import RetryErrors import samples +from google.cloud import spanner +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect CREATE_TABLE_SINGERS = """\ CREATE TABLE Singers ( diff --git a/packages/google-cloud-spanner/samples/samples/autocommit_test.py b/packages/google-cloud-spanner/samples/samples/autocommit_test.py index a22f74e6b44b..8150058f1c90 100644 --- a/packages/google-cloud-spanner/samples/samples/autocommit_test.py +++ b/packages/google-cloud-spanner/samples/samples/autocommit_test.py @@ -4,8 +4,8 @@ # license that can be found in the LICENSE file or at # https://developers.google.com/open-source/licenses/bsd -import pytest from google.api_core.exceptions import Aborted +import pytest from test_utils.retry import RetryErrors import autocommit diff --git a/packages/google-cloud-spanner/samples/samples/backup_sample.py b/packages/google-cloud-spanner/samples/samples/backup_sample.py index e984d3a11ea8..59c4bb71d816 100644 --- a/packages/google-cloud-spanner/samples/samples/backup_sample.py +++ b/packages/google-cloud-spanner/samples/samples/backup_sample.py @@ -631,8 +631,7 @@ def 
create_database_with_version_retention_period( + " AlbumTitle STRING(MAX)" + ") PRIMARY KEY (SingerId, AlbumId)," + " INTERLEAVE IN PARENT Singers ON DELETE CASCADE", - "ALTER DATABASE `{}`" - " SET OPTIONS (version_retention_period = '{}')".format( + "ALTER DATABASE `{}` SET OPTIONS (version_retention_period = '{}')".format( database_id, retention_period ), ] @@ -706,8 +705,8 @@ def copy_backup_with_multiple_kms_keys( ): """Copies a backup.""" - from google.cloud.spanner_admin_database_v1.types import backup as backup_pb from google.cloud.spanner_admin_database_v1 import CopyBackupEncryptionConfig + from google.cloud.spanner_admin_database_v1.types import backup as backup_pb spanner_client = spanner.Client() database_admin_api = spanner_client.database_admin_api diff --git a/packages/google-cloud-spanner/samples/samples/backup_sample_test.py b/packages/google-cloud-spanner/samples/samples/backup_sample_test.py index b588d5735b66..55e7bac1881e 100644 --- a/packages/google-cloud-spanner/samples/samples/backup_sample_test.py +++ b/packages/google-cloud-spanner/samples/samples/backup_sample_test.py @@ -93,7 +93,7 @@ def test_create_backup_with_encryption_key( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test project") @pytest.mark.dependency(name="create_backup_with_multiple_kms_keys") def test_create_backup_with_multiple_kms_keys( capsys, @@ -115,7 +115,7 @@ def test_create_backup_with_multiple_kms_keys( assert kms_key_names[2] in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test project") @pytest.mark.dependency(depends=["create_backup_with_multiple_kms_keys"]) def test_copy_backup_with_multiple_kms_keys( capsys, multi_region_instance_id, spanner_client, kms_key_names @@ -162,7 +162,7 @@ def 
test_restore_database_with_encryption_key( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test project") @pytest.mark.dependency(depends=["create_backup_with_multiple_kms_keys"]) @RetryErrors(exception=DeadlineExceeded, max_tries=2) def test_restore_database_with_multiple_kms_keys( diff --git a/packages/google-cloud-spanner/samples/samples/backup_schedule_samples.py b/packages/google-cloud-spanner/samples/samples/backup_schedule_samples.py index c3c86b1538d7..53321fc7dd5b 100644 --- a/packages/google-cloud-spanner/samples/samples/backup_schedule_samples.py +++ b/packages/google-cloud-spanner/samples/samples/backup_schedule_samples.py @@ -18,7 +18,6 @@ """ import argparse - from enum import Enum @@ -29,14 +28,15 @@ def create_full_backup_schedule( schedule_id: str, ) -> None: from datetime import timedelta + from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import ( - backup_schedule as backup_schedule_pb, - ) from google.cloud.spanner_admin_database_v1.types import ( CreateBackupEncryptionConfig, FullBackupSpec, ) + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) client = spanner.Client() database_admin_api = client.database_admin_api @@ -74,14 +74,15 @@ def create_incremental_backup_schedule( schedule_id: str, ) -> None: from datetime import timedelta + from google.cloud import spanner - from google.cloud.spanner_admin_database_v1.types import ( - backup_schedule as backup_schedule_pb, - ) from google.cloud.spanner_admin_database_v1.types import ( CreateBackupEncryptionConfig, IncrementalBackupSpec, ) + from google.cloud.spanner_admin_database_v1.types import ( + backup_schedule as backup_schedule_pb, + ) client = spanner.Client() database_admin_api = client.database_admin_api @@ -174,12 +175,13 @@ def update_backup_schedule( 
schedule_id: str, ) -> None: from datetime import timedelta + from google.cloud import spanner from google.cloud.spanner_admin_database_v1.types import ( - backup_schedule as backup_schedule_pb, + CreateBackupEncryptionConfig, ) from google.cloud.spanner_admin_database_v1.types import ( - CreateBackupEncryptionConfig, + backup_schedule as backup_schedule_pb, ) from google.protobuf.field_mask_pb2 import FieldMask diff --git a/packages/google-cloud-spanner/samples/samples/backup_schedule_samples_test.py b/packages/google-cloud-spanner/samples/samples/backup_schedule_samples_test.py index 6584d89701b2..8e263b6a4bdf 100644 --- a/packages/google-cloud-spanner/samples/samples/backup_schedule_samples_test.py +++ b/packages/google-cloud-spanner/samples/samples/backup_schedule_samples_test.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import backup_schedule_samples as samples -import pytest import uuid +import pytest + +import backup_schedule_samples as samples __FULL_BACKUP_SCHEDULE_ID = "full-backup-schedule" __INCREMENTAL_BACKUP_SCHEDULE_ID = "incremental-backup-schedule" diff --git a/packages/google-cloud-spanner/samples/samples/conftest.py b/packages/google-cloud-spanner/samples/samples/conftest.py index b34e9d16b12f..25671684cf6f 100644 --- a/packages/google-cloud-spanner/samples/samples/conftest.py +++ b/packages/google-cloud-spanner/samples/samples/conftest.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-""" Shared pytest fixtures.""" +"""Shared pytest fixtures.""" import time import uuid @@ -19,10 +19,10 @@ from google.api_core import exceptions from google.cloud import spanner_admin_database_v1 from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.cloud.spanner_v1 import backup, client, database, instance import pytest from test_utils import retry -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin INSTANCE_CREATION_TIMEOUT = 560 # seconds diff --git a/packages/google-cloud-spanner/samples/samples/graph_snippets_test.py b/packages/google-cloud-spanner/samples/samples/graph_snippets_test.py index bd49260007ab..70e14e21ac84 100644 --- a/packages/google-cloud-spanner/samples/samples/graph_snippets_test.py +++ b/packages/google-cloud-spanner/samples/samples/graph_snippets_test.py @@ -14,11 +14,10 @@ # import time import uuid -import pytest from google.api_core import exceptions - from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +import pytest from test_utils.retry import RetryErrors import graph_snippets diff --git a/packages/google-cloud-spanner/samples/samples/noxfile.py b/packages/google-cloud-spanner/samples/samples/noxfile.py index 719e13109909..4e5349e54102 100644 --- a/packages/google-cloud-spanner/samples/samples/noxfile.py +++ b/packages/google-cloud-spanner/samples/samples/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! 
@@ -160,6 +159,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +187,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +211,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +224,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +256,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/packages/google-cloud-spanner/samples/samples/pg_snippets.py b/packages/google-cloud-spanner/samples/samples/pg_snippets.py index 432d68a8ce5f..1c7e120f9c92 100644 --- a/packages/google-cloud-spanner/samples/samples/pg_snippets.py +++ b/packages/google-cloud-spanner/samples/samples/pg_snippets.py @@ -19,6 +19,7 @@ For more information, see the README.rst under /spanner. """ + import argparse import base64 import datetime @@ -491,7 +492,7 @@ def read_data_with_storing_index(instance_id, database_id): ) for row in results: - print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, MarketingBudget: {}".format(*row)) # [END spanner_postgresql_read_data_with_storing_index] @@ -600,7 +601,7 @@ def query_data_with_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId, FirstName, LastName FROM Singers " "WHERE LastName = $1", + "SELECT SingerId, FirstName, LastName FROM Singers WHERE LastName = $1", params={"p1": "Garcia"}, param_types={"p1": spanner.param_types.STRING}, ) @@ -624,7 +625,7 @@ def transfer_budget(transaction): # Transfer marketing budget from one album to another. Performed in a # single transaction to ensure that the transfer is atomic. 
second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -636,8 +637,7 @@ def transfer_budget(transaction): # will be rerun by the client library if second_album_budget >= transfer_amount: first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" + "SELECT MarketingBudget from Albums WHERE SingerId = 1 and AlbumId = 1" ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -969,8 +969,7 @@ def delete_data_with_dml_returning(instance_id, database_id): # deleted records by using 'RETURNING *'. def delete_singers(transaction): results = transaction.execute_sql( - "DELETE FROM Singers WHERE FirstName = 'David' " - "RETURNING SingerId, FullName" + "DELETE FROM Singers WHERE FirstName = 'David' RETURNING SingerId, FullName" ) for result in results: print("SingerId: {}, FullName: {}".format(*result)) @@ -1226,7 +1225,7 @@ def query_data_with_bytes(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = $1", + "SELECT VenueId, VenueName FROM Venues WHERE VenueInfo = $1", params=param, param_types=param_type, ) @@ -1277,7 +1276,7 @@ def query_data_with_int(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName, Capacity FROM Venues " "WHERE Capacity >= $1", + "SELECT VenueId, VenueName, Capacity FROM Venues WHERE Capacity >= $1", params=param, param_types=param_type, ) @@ -1302,7 +1301,7 @@ def query_data_with_string(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName 
= $1", + "SELECT VenueId, VenueName FROM Venues WHERE VenueName = $1", params=param, param_types=param_type, ) diff --git a/packages/google-cloud-spanner/samples/samples/pg_snippets_test.py b/packages/google-cloud-spanner/samples/samples/pg_snippets_test.py index 1b5d2971c19e..d4f08499d25a 100644 --- a/packages/google-cloud-spanner/samples/samples/pg_snippets_test.py +++ b/packages/google-cloud-spanner/samples/samples/pg_snippets_test.py @@ -15,9 +15,9 @@ import time import uuid -import pytest from google.api_core import exceptions from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +import pytest from test_utils.retry import RetryErrors import pg_snippets as snippets diff --git a/packages/google-cloud-spanner/samples/samples/snippets.py b/packages/google-cloud-spanner/samples/samples/snippets.py index 96c00548525c..d3c89f236364 100644 --- a/packages/google-cloud-spanner/samples/samples/snippets.py +++ b/packages/google-cloud-spanner/samples/samples/snippets.py @@ -435,8 +435,9 @@ def create_database_with_default_leader(instance_id, database_id, default_leader AlbumTitle STRING(MAX) ) PRIMARY KEY (SingerId, AlbumId), INTERLEAVE IN PARENT Singers ON DELETE CASCADE""", - "ALTER DATABASE {}" - " SET OPTIONS (default_leader = '{}')".format(database_id, default_leader), + "ALTER DATABASE {} SET OPTIONS (default_leader = '{}')".format( + database_id, default_leader + ), ], ) operation = database_admin_api.create_database(request=request) @@ -467,8 +468,9 @@ def update_database_with_default_leader(instance_id, database_id, default_leader spanner_client.project, instance_id, database_id ), statements=[ - "ALTER DATABASE {}" - " SET OPTIONS (default_leader = '{}')".format(database_id, default_leader) + "ALTER DATABASE {} SET OPTIONS (default_leader = '{}')".format( + database_id, default_leader + ) ], ) operation = database_admin_api.update_database_ddl(request) @@ -811,7 +813,7 @@ def query_data_with_index( ) for row in results: - 
print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, MarketingBudget: {}".format(*row)) # [END spanner_query_data_with_index] @@ -905,7 +907,7 @@ def read_data_with_storing_index(instance_id, database_id): ) for row in results: - print("AlbumId: {}, AlbumTitle: {}, " "MarketingBudget: {}".format(*row)) + print("AlbumId: {}, AlbumTitle: {}, MarketingBudget: {}".format(*row)) # [END spanner_read_data_with_storing_index] @@ -1439,7 +1441,7 @@ def query_with_struct(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId FROM Singers WHERE " "(FirstName, LastName) = @name", + "SELECT SingerId FROM Singers WHERE (FirstName, LastName) = @name", params={"name": record_value}, param_types={"name": record_type}, ) @@ -1503,7 +1505,7 @@ def query_struct_field(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT SingerId FROM Singers " "WHERE FirstName = @name.FirstName", + "SELECT SingerId FROM Singers WHERE FirstName = @name.FirstName", params={"name": ("Elena", "Campbell")}, param_types={"name": name_type}, ) @@ -1924,7 +1926,7 @@ def transfer_budget(transaction): # Transfer marketing budget from one album to another. Performed in a # single transaction to ensure that the transfer is atomic. 
second_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " "WHERE SingerId = 2 and AlbumId = 2" + "SELECT MarketingBudget from Albums WHERE SingerId = 2 and AlbumId = 2" ) second_album_row = list(second_album_result)[0] second_album_budget = second_album_row[0] @@ -1936,8 +1938,7 @@ def transfer_budget(transaction): # will be rerun by the client library if second_album_budget >= transfer_amount: first_album_result = transaction.execute_sql( - "SELECT MarketingBudget from Albums " - "WHERE SingerId = 1 and AlbumId = 1" + "SELECT MarketingBudget from Albums WHERE SingerId = 1 and AlbumId = 1" ) first_album_row = list(first_album_result)[0] first_album_budget = first_album_row[0] @@ -2226,7 +2227,7 @@ def query_data_with_bytes(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueInfo = @venue_info", + "SELECT VenueId, VenueName FROM Venues WHERE VenueInfo = @venue_info", params=param, param_types=param_type, ) @@ -2329,7 +2330,7 @@ def query_data_with_string(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, VenueName FROM Venues " "WHERE VenueName = @venue_name", + "SELECT VenueId, VenueName FROM Venues WHERE VenueName = @venue_name", params=param, param_types=param_type, ) @@ -2354,7 +2355,7 @@ def query_data_with_numeric_parameter(instance_id, database_id): with database.snapshot() as snapshot: results = snapshot.execute_sql( - "SELECT VenueId, Revenue FROM Venues " "WHERE Revenue < @revenue", + "SELECT VenueId, Revenue FROM Venues WHERE Revenue < @revenue", params=param, param_types=param_type, ) @@ -3192,7 +3193,7 @@ def isolation_level_options( # [START spanner_isolation_level] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - from google.cloud.spanner_v1 import TransactionOptions, DefaultTransactionOptions + from google.cloud.spanner_v1 import 
DefaultTransactionOptions, TransactionOptions # The isolation level specified at the client-level will be applied to all RW transactions. isolation_options_for_client = TransactionOptions.IsolationLevel.SERIALIZABLE @@ -3241,11 +3242,13 @@ def read_lock_mode_options( # [START spanner_read_lock_mode] # instance_id = "your-spanner-instance" # database_id = "your-spanner-db-id" - from google.cloud.spanner_v1 import TransactionOptions, DefaultTransactionOptions + from google.cloud.spanner_v1 import DefaultTransactionOptions, TransactionOptions # The read lock mode specified at the client-level will be applied to all # RW transactions. - read_lock_mode_options_for_client = TransactionOptions.ReadWrite.ReadLockMode.OPTIMISTIC + read_lock_mode_options_for_client = ( + TransactionOptions.ReadWrite.ReadLockMode.OPTIMISTIC + ) # Create a client that uses Serializable isolation (default) with # optimistic locking for read-write transactions. @@ -3280,7 +3283,7 @@ def update_albums_with_read_lock_mode(transaction): database.run_in_transaction( update_albums_with_read_lock_mode, - read_lock_mode=read_lock_mode_options_for_transaction + read_lock_mode=read_lock_mode_options_for_transaction, ) # [END spanner_read_lock_mode] @@ -3909,9 +3912,7 @@ def add_split_points(instance_id, database_id): subparsers.add_parser( "isolation_level_options", help=isolation_level_options.__doc__ ) - subparsers.add_parser( - "read_lock_mode_options", help=read_lock_mode_options.__doc__ - ) + subparsers.add_parser("read_lock_mode_options", help=read_lock_mode_options.__doc__) subparsers.add_parser( "set_custom_timeout_and_retry", help=set_custom_timeout_and_retry.__doc__ ) diff --git a/packages/google-cloud-spanner/samples/samples/snippets_test.py b/packages/google-cloud-spanner/samples/samples/snippets_test.py index 3888bf012092..d1abeb4b277e 100644 --- a/packages/google-cloud-spanner/samples/samples/snippets_test.py +++ b/packages/google-cloud-spanner/samples/samples/snippets_test.py @@ -254,7 
+254,7 @@ def test_create_database_with_encryption_config( assert kms_key_name in out -@pytest.mark.skip(reason="skipped since the KMS keys are not added on test " "project") +@pytest.mark.skip(reason="skipped since the KMS keys are not added on test project") def test_create_database_with_multiple_kms_keys( capsys, multi_region_instance, diff --git a/packages/google-cloud-spanner/samples/samples/testdata/singer_pb2.py b/packages/google-cloud-spanner/samples/samples/testdata/singer_pb2.py index 286f8841633a..c0b1711d20fa 100644 --- a/packages/google-cloud-spanner/samples/samples/testdata/singer_pb2.py +++ b/packages/google-cloud-spanner/samples/samples/testdata/singer_pb2.py @@ -1,28 +1,45 @@ # -*- coding: utf-8 -*- + +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: singer.proto # Protobuf Python Version: 4.25.1 """Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0csinger.proto\x12\x16\x65xamples.spanner.music\"\xc1\x01\n\nSingerInfo\x12\x16\n\tsinger_id\x18\x01 \x01(\x03H\x00\x88\x01\x01\x12\x17\n\nbirth_date\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0bnationality\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x31\n\x05genre\x18\x04 \x01(\x0e\x32\x1d.examples.spanner.music.GenreH\x03\x88\x01\x01\x42\x0c\n\n_singer_idB\r\n\x0b_birth_dateB\x0e\n\x0c_nationalityB\x08\n\x06_genre*.\n\x05Genre\x12\x07\n\x03POP\x10\x00\x12\x08\n\x04JAZZ\x10\x01\x12\x08\n\x04\x46OLK\x10\x02\x12\x08\n\x04ROCK\x10\x03\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0csinger.proto\x12\x16\x65xamples.spanner.music"\xc1\x01\n\nSingerInfo\x12\x16\n\tsinger_id\x18\x01 \x01(\x03H\x00\x88\x01\x01\x12\x17\n\nbirth_date\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x18\n\x0bnationality\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x31\n\x05genre\x18\x04 \x01(\x0e\x32\x1d.examples.spanner.music.GenreH\x03\x88\x01\x01\x42\x0c\n\n_singer_idB\r\n\x0b_birth_dateB\x0e\n\x0c_nationalityB\x08\n\x06_genre*.\n\x05Genre\x12\x07\n\x03POP\x10\x00\x12\x08\n\x04JAZZ\x10\x01\x12\x08\n\x04\x46OLK\x10\x02\x12\x08\n\x04ROCK\x10\x03\x62\x06proto3' +) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'singer_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "singer_pb2", _globals) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - _globals['_GENRE']._serialized_start=236 
- _globals['_GENRE']._serialized_end=282 - _globals['_SINGERINFO']._serialized_start=41 - _globals['_SINGERINFO']._serialized_end=234 + DESCRIPTOR._options = None + _globals["_GENRE"]._serialized_start = 236 + _globals["_GENRE"]._serialized_end = 282 + _globals["_SINGERINFO"]._serialized_start = 41 + _globals["_SINGERINFO"]._serialized_end = 234 # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-spanner/tests/system/testdata/singer_pb2.py b/packages/google-cloud-spanner/tests/system/testdata/singer_pb2.py index 92ab0c5f608c..c0b1711d20fa 100644 --- a/packages/google-cloud-spanner/tests/system/testdata/singer_pb2.py +++ b/packages/google-cloud-spanner/tests/system/testdata/singer_pb2.py @@ -1,4 +1,19 @@ # -*- coding: utf-8 -*- + +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by the protocol buffer compiler. DO NOT EDIT! # source: singer.proto # Protobuf Python Version: 4.25.1 diff --git a/packages/google-cloud-spanner/tests/unit/gapic/conftest.py b/packages/google-cloud-spanner/tests/unit/gapic/conftest.py deleted file mode 100644 index 22ba265871d4..000000000000 --- a/packages/google-cloud-spanner/tests/unit/gapic/conftest.py +++ /dev/null @@ -1,20 +0,0 @@ -import asyncio -import sys - -import pytest - - -@pytest.fixture(autouse=True) -def provide_loop_to_sync_grpc_tests(): - """ - GAPIC creates synchronous methods testing Asyncio transports. 
- If no global loop exists, `grpc.aio` engine crashes during initialization. - """ - try: - loop = asyncio.get_event_loop() - except RuntimeError: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - yield - # No close here, just ensure existance diff --git a/packages/google-cloud-spanner/tests/unit/testdata/singer_pb2.py b/packages/google-cloud-spanner/tests/unit/testdata/singer_pb2.py index 92ab0c5f608c..c0b1711d20fa 100644 --- a/packages/google-cloud-spanner/tests/unit/testdata/singer_pb2.py +++ b/packages/google-cloud-spanner/tests/unit/testdata/singer_pb2.py @@ -1,4 +1,19 @@ # -*- coding: utf-8 -*- + +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by the protocol buffer compiler. DO NOT EDIT! # source: singer.proto # Protobuf Python Version: 4.25.1 diff --git a/packages/google-cloud-speech/.repo-metadata.json b/packages/google-cloud-speech/.repo-metadata.json index 8c6c93d42987..f91fcfe41b3f 100644 --- a/packages/google-cloud-speech/.repo-metadata.json +++ b/packages/google-cloud-speech/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "enables easy integration of Google speech recognition technologies into developer applications. 
Send audio and receive a text transcription from the Speech-to-Text API service.", - "api_id": "speech.googleapis.com", - "api_shortname": "speech", - "client_documentation": "https://cloud.google.com/python/docs/reference/speech/latest", - "default_version": "v1", - "distribution_name": "google-cloud-speech", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559758", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "speech", - "name_pretty": "Cloud Speech", - "product_documentation": "https://cloud.google.com/speech-to-text/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "enables easy integration of Google speech recognition technologies into developer applications. Send audio and receive a text transcription from the Speech-to-Text API service.", + "api_id": "speech.googleapis.com", + "api_shortname": "speech", + "client_documentation": "https://cloud.google.com/python/docs/reference/speech/latest", + "default_version": "v1", + "distribution_name": "google-cloud-speech", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559758", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "speech", + "name_pretty": "Cloud Speech", + "product_documentation": "https://cloud.google.com/speech-to-text/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-speech/google/cloud/speech/__init__.py b/packages/google-cloud-speech/google/cloud/speech/__init__.py index 956f1b3e33af..6c130e32fd60 100644 --- a/packages/google-cloud-speech/google/cloud/speech/__init__.py +++ b/packages/google-cloud-speech/google/cloud/speech/__init__.py @@ -18,12 +18,12 @@ __version__ = package_version.__version__ +from google.cloud.speech_v1 import SpeechClient from google.cloud.speech_v1.services.adaptation.async_client import ( AdaptationAsyncClient, ) from 
google.cloud.speech_v1.services.adaptation.client import AdaptationClient from google.cloud.speech_v1.services.speech.async_client import SpeechAsyncClient -from google.cloud.speech_v1 import SpeechClient from google.cloud.speech_v1.types.cloud_speech import ( LongRunningRecognizeMetadata, LongRunningRecognizeRequest, diff --git a/packages/google-cloud-storage-control/.repo-metadata.json b/packages/google-cloud-storage-control/.repo-metadata.json index d15aabe56c50..3c352c6c6c1f 100644 --- a/packages/google-cloud-storage-control/.repo-metadata.json +++ b/packages/google-cloud-storage-control/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Lets you perform metadata-specific, control plane, and long-running operations apart from the Storage API. Separating these operations from the Storage API improves API standardization and lets you run faster releases.", - "api_id": "storage.googleapis.com", - "api_shortname": "storage", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest", - "default_version": "v2", - "distribution_name": "google-cloud-storage-control", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187243&template=1162869", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-storage-control", - "name_pretty": "Storage Control API", - "product_documentation": "https://cloud.google.com/storage/docs/reference/rpc/google.storage.control.v2", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Lets you perform metadata-specific, control plane, and long-running operations apart from the Storage API. 
Separating these operations from the Storage API improves API standardization and lets you run faster releases.", + "api_id": "storage.googleapis.com", + "api_shortname": "storage", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-storage-control/latest", + "default_version": "v2", + "distribution_name": "google-cloud-storage-control", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187243\u0026template=1162869", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-storage-control", + "name_pretty": "Storage Control API", + "product_documentation": "https://cloud.google.com/storage/docs/reference/rpc/google.storage.control.v2", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-storage-transfer/.repo-metadata.json b/packages/google-cloud-storage-transfer/.repo-metadata.json index a65be542b1ea..db05c7027798 100644 --- a/packages/google-cloud-storage-transfer/.repo-metadata.json +++ b/packages/google-cloud-storage-transfer/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Secure, low-cost services for transferring data from cloud or on-premises sources.", - "api_id": "storagetransfer.googleapis.com", - "api_shortname": "storagetransfer", - "client_documentation": "https://cloud.google.com/python/docs/reference/storagetransfer/latest", - "default_version": "v1", - "distribution_name": "google-cloud-storage-transfer", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "storagetransfer", - "name_pretty": "Storage Transfer Service", - "product_documentation": "https://cloud.google.com/storage-transfer/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Secure, low-cost services for transferring data from cloud or on-premises sources.", + "api_id": "storagetransfer.googleapis.com", + "api_shortname": 
"storagetransfer", + "client_documentation": "https://cloud.google.com/python/docs/reference/storagetransfer/latest", + "default_version": "v1", + "distribution_name": "google-cloud-storage-transfer", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "storagetransfer", + "name_pretty": "Storage Transfer Service", + "product_documentation": "https://cloud.google.com/storage-transfer/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-storage/.repo-metadata.json b/packages/google-cloud-storage/.repo-metadata.json index fa2d5fa6c12e..68f6c33d2e25 100644 --- a/packages/google-cloud-storage/.repo-metadata.json +++ b/packages/google-cloud-storage/.repo-metadata.json @@ -1,18 +1,16 @@ { - "name": "storage", - "name_pretty": "Google Cloud Storage", - "product_documentation": "https://cloud.google.com/storage", + "api_description": "is a durable and highly available object storage service. Google Cloud Storage is almost infinitely scalable and guarantees consistency: when a write succeeds, the latest copy of the object will be returned to any GET, globally.", + "api_id": "storage.googleapis.com", + "api_shortname": "storage", "client_documentation": "https://cloud.google.com/python/docs/reference/storage/latest", + "default_version": "v2", + "distribution_name": "google-cloud-storage", "issue_tracker": "https://issuetracker.google.com/savedsearches/559782", - "release_level": "stable", "language": "python", "library_type": "GAPIC_MANUAL", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-storage", - "api_id": "storage.googleapis.com", - "requires_billing": true, - "default_version": "v2", - "codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/gcs-team @googleapis/gcs-fs", - "api_shortname": "storage", - "api_description": "is a durable and highly available object storage service. 
Google Cloud Storage is almost infinitely scalable and guarantees consistency: when a write succeeds, the latest copy of the object will be returned to any GET, globally." -} + "name": "storage", + "name_pretty": "Google Cloud Storage", + "product_documentation": "https://cloud.google.com/storage", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-storage/google/cloud/_storage_v2/types/storage.py b/packages/google-cloud-storage/google/cloud/_storage_v2/types/storage.py index 8d326c0936f7..f5678132c6bc 100644 --- a/packages/google-cloud-storage/google/cloud/_storage_v2/types/storage.py +++ b/packages/google-cloud-storage/google/cloud/_storage_v2/types/storage.py @@ -1798,6 +1798,9 @@ class WriteObjectResponse(proto.Message): finalized. This field is a member of `oneof`_ ``write_status``. + persisted_data_checksums (google.cloud._storage_v2.types.ObjectChecksums): + If persisted_size is set, contains checksums of persisted + data. """ persisted_size: int = proto.Field( @@ -1811,6 +1814,11 @@ class WriteObjectResponse(proto.Message): oneof="write_status", message="Object", ) + persisted_data_checksums: "ObjectChecksums" = proto.Field( + proto.MESSAGE, + number=3, + message="ObjectChecksums", + ) class AppendObjectSpec(proto.Message): @@ -1950,8 +1958,9 @@ class BidiWriteObjectRequest(proto.Message): object_checksums (google.cloud._storage_v2.types.ObjectChecksums): Optional. Checksums for the complete object. If the checksums computed by the service don't match the specified - checksums the call fails. Might only be provided in the - first request or the last request (with finish_write set). + checksums the call fails. May be provided in the last + request (with finish_write set). For non-appendable objects + only, may also be provided in the first request. state_lookup (bool): Optional. 
For each ``BidiWriteObjectRequest`` where ``state_lookup`` is ``true`` or the client closes the @@ -2058,6 +2067,9 @@ class BidiWriteObjectResponse(proto.Message): finalized. This field is a member of `oneof`_ ``write_status``. + persisted_data_checksums (google.cloud._storage_v2.types.ObjectChecksums): + If persisted_size is set, contains checksums of persisted + data. write_handle (google.cloud._storage_v2.types.BidiWriteHandle): An optional write handle that is returned periodically in response messages. Clients @@ -2078,6 +2090,11 @@ class BidiWriteObjectResponse(proto.Message): oneof="write_status", message="Object", ) + persisted_data_checksums: "ObjectChecksums" = proto.Field( + proto.MESSAGE, + number=4, + message="ObjectChecksums", + ) write_handle: "BidiWriteHandle" = proto.Field( proto.MESSAGE, number=3, @@ -2276,6 +2293,9 @@ class QueryWriteStatusResponse(proto.Message): finalized. This field is a member of `oneof`_ ``write_status``. + persisted_data_checksums (google.cloud._storage_v2.types.ObjectChecksums): + If persisted_size is set, contains checksums of persisted + data. 
""" persisted_size: int = proto.Field( @@ -2289,6 +2309,11 @@ class QueryWriteStatusResponse(proto.Message): oneof="write_status", message="Object", ) + persisted_data_checksums: "ObjectChecksums" = proto.Field( + proto.MESSAGE, + number=3, + message="ObjectChecksums", + ) class RewriteObjectRequest(proto.Message): diff --git a/packages/google-cloud-storagebatchoperations/.repo-metadata.json b/packages/google-cloud-storagebatchoperations/.repo-metadata.json index fe0580d2fa5a..a25cbb5e5ce6 100644 --- a/packages/google-cloud-storagebatchoperations/.repo-metadata.json +++ b/packages/google-cloud-storagebatchoperations/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "null ", - "api_id": "storagebatchoperations.googleapis.com", - "api_shortname": "storagebatchoperations", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-storagebatchoperations/latest", - "default_version": "v1", - "distribution_name": "google-cloud-storagebatchoperations", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=815827&template=1395449", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-storagebatchoperations", - "name_pretty": "Storage Batch Operations API", - "product_documentation": "https://cloud.google.com/storage/docs/batch-operations/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "null ", + "api_id": "storagebatchoperations.googleapis.com", + "api_shortname": "storagebatchoperations", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-storagebatchoperations/latest", + "default_version": "v1", + "distribution_name": "google-cloud-storagebatchoperations", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=815827\u0026template=1395449", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-storagebatchoperations", + "name_pretty": "Storage Batch 
Operations API", + "product_documentation": "https://cloud.google.com/storage/docs/batch-operations/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-storageinsights/.repo-metadata.json b/packages/google-cloud-storageinsights/.repo-metadata.json index 42bba677514e..bc2f34963375 100644 --- a/packages/google-cloud-storageinsights/.repo-metadata.json +++ b/packages/google-cloud-storageinsights/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Storage Insights inventory report feature helps you manage your object storage at scale.", - "api_id": "storageinsights.googleapis.com", - "api_shortname": "storageinsights", - "client_documentation": "https://cloud.google.com/python/docs/reference/storageinsights/latest", - "default_version": "v1", - "distribution_name": "google-cloud-storageinsights", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1156610", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "storageinsights", - "name_pretty": "Storage Insights API", - "product_documentation": "https://cloud.google.com/storage/docs/insights/storage-insights", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Storage Insights inventory report feature helps you manage your object storage at scale.", + "api_id": "storageinsights.googleapis.com", + "api_shortname": "storageinsights", + "client_documentation": "https://cloud.google.com/python/docs/reference/storageinsights/latest", + "default_version": "v1", + "distribution_name": "google-cloud-storageinsights", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1156610", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "storageinsights", + "name_pretty": "Storage Insights API", + "product_documentation": "https://cloud.google.com/storage/docs/insights/storage-insights", + "release_level": 
"preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-support/.repo-metadata.json b/packages/google-cloud-support/.repo-metadata.json index 5f3c9209244a..608712826a2f 100644 --- a/packages/google-cloud-support/.repo-metadata.json +++ b/packages/google-cloud-support/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Manages Google Cloud technical support cases for Customer Care support offerings.", - "api_id": "cloudsupport.googleapis.com", - "api_shortname": "support", - "client_documentation": "https://cloud.google.com/python/docs/reference/support/latest", - "default_version": "v2", - "distribution_name": "google-cloud-support", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "support", - "name_pretty": "Google Cloud Support API", - "product_documentation": "https://cloud.google.com/support/docs/reference/support-api", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Manages Google Cloud technical support cases for Customer Care support offerings.", + "api_id": "cloudsupport.googleapis.com", + "api_shortname": "support", + "client_documentation": "https://cloud.google.com/python/docs/reference/support/latest", + "default_version": "v2", + "distribution_name": "google-cloud-support", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "support", + "name_pretty": "Google Cloud Support API", + "product_documentation": "https://cloud.google.com/support/docs/reference/support-api", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-talent/.repo-metadata.json b/packages/google-cloud-talent/.repo-metadata.json index e9f67520370d..60ee958e113f 100644 --- 
a/packages/google-cloud-talent/.repo-metadata.json +++ b/packages/google-cloud-talent/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "Cloud Talent Solution provides the capability to create, read, update, and delete job postings, as well as search jobs based on keywords and filters.", - "api_id": "jobs.googleapis.com", - "api_shortname": "jobs", - "client_documentation": "https://cloud.google.com/python/docs/reference/talent/latest", - "default_version": "v4", - "distribution_name": "google-cloud-talent", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559664", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "talent", - "name_pretty": "Talent Solution", - "product_documentation": "https://cloud.google.com/solutions/talent-solution/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "Cloud Talent Solution provides the capability to create, read, update, and delete job postings, as well as search jobs based on keywords and filters.", + "api_id": "jobs.googleapis.com", + "api_shortname": "jobs", + "client_documentation": "https://cloud.google.com/python/docs/reference/talent/latest", + "default_version": "v4", + "distribution_name": "google-cloud-talent", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559664", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "talent", + "name_pretty": "Talent Solution", + "product_documentation": "https://cloud.google.com/solutions/talent-solution/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-tasks/.repo-metadata.json b/packages/google-cloud-tasks/.repo-metadata.json index 9009e4f1a9c4..88a3d7efb00c 100644 --- a/packages/google-cloud-tasks/.repo-metadata.json +++ b/packages/google-cloud-tasks/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "a fully managed service that 
allows you to manage the execution, dispatch and delivery of a large number of distributed tasks. You can asynchronously perform work outside of a user request. Your tasks can be executed on App Engine or any arbitrary HTTP endpoint.", - "api_id": "cloudtasks.googleapis.com", - "api_shortname": "cloudtasks", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudtasks/latest", - "default_version": "v2", - "distribution_name": "google-cloud-tasks", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5433985", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudtasks", - "name_pretty": "Cloud Tasks", - "product_documentation": "https://cloud.google.com/tasks/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "a fully managed service that allows you to manage the execution, dispatch and delivery of a large number of distributed tasks. You can asynchronously perform work outside of a user request. 
Your tasks can be executed on App Engine or any arbitrary HTTP endpoint.", + "api_id": "cloudtasks.googleapis.com", + "api_shortname": "cloudtasks", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudtasks/latest", + "default_version": "v2", + "distribution_name": "google-cloud-tasks", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5433985", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudtasks", + "name_pretty": "Cloud Tasks", + "product_documentation": "https://cloud.google.com/tasks/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-telcoautomation/.repo-metadata.json b/packages/google-cloud-telcoautomation/.repo-metadata.json index 7b5ff0531c8e..943e963485c8 100644 --- a/packages/google-cloud-telcoautomation/.repo-metadata.json +++ b/packages/google-cloud-telcoautomation/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "APIs to automate 5G deployment and management of cloud infrastructure and network functions.", - "api_id": "telcoautomation.googleapis.com", - "api_shortname": "telcoautomation", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-telcoautomation/latest", - "default_version": "v1", - "distribution_name": "google-cloud-telcoautomation", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=190865&template=1161103", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-telcoautomation", - "name_pretty": "Telco Automation API", - "product_documentation": "https://cloud.google.com/telecom-network-automation", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "APIs to automate 5G deployment and management of cloud infrastructure and network functions.", + "api_id": "telcoautomation.googleapis.com", + "api_shortname": "telcoautomation", + 
"client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-telcoautomation/latest", + "default_version": "v1", + "distribution_name": "google-cloud-telcoautomation", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=190865\u0026template=1161103", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-telcoautomation", + "name_pretty": "Telco Automation API", + "product_documentation": "https://cloud.google.com/telecom-network-automation", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-testutils/.repo-metadata.json b/packages/google-cloud-testutils/.repo-metadata.json index b4153e7e0e71..f6706146f556 100644 --- a/packages/google-cloud-testutils/.repo-metadata.json +++ b/packages/google-cloud-testutils/.repo-metadata.json @@ -1,14 +1,11 @@ { - "name": "google-cloud-test-utils", - "name_pretty": "Python Test Utils for Google Cloud", - "product_documentation": "", "client_documentation": "https://github.com/googleapis/google-cloud-python/packages/google-cloud-testutils", + "distribution_name": "google-cloud-testutils", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "preview", "language": "python", "library_type": "OTHER", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-cloud-testutils", - "default_version": "", - "codeowner_team": "" -} + "name": "google-cloud-test-utils", + "name_pretty": "Python Test Utils for Google Cloud", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/.repo-metadata.json b/packages/google-cloud-texttospeech/.repo-metadata.json index 9c46e36ee84b..347bb24b89e1 100644 --- a/packages/google-cloud-texttospeech/.repo-metadata.json +++ b/packages/google-cloud-texttospeech/.repo-metadata.json @@ -1,17 +1,16 @@ { - 
"api_description": "enables easy integration of Google text recognition technologies into developer applications. Send text and receive synthesized audio output from the Cloud Text-to-Speech API service.", - "api_id": "texttospeech.googleapis.com", - "api_shortname": "texttospeech", - "client_documentation": "https://cloud.google.com/python/docs/reference/texttospeech/latest", - "default_version": "v1", - "distribution_name": "google-cloud-texttospeech", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5235428", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "texttospeech", - "name_pretty": "Google Cloud Text-to-Speech", - "product_documentation": "https://cloud.google.com/text-to-speech", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "enables easy integration of Google text recognition technologies into developer applications. Send text and receive synthesized audio output from the Cloud Text-to-Speech API service.", + "api_id": "texttospeech.googleapis.com", + "api_shortname": "texttospeech", + "client_documentation": "https://cloud.google.com/python/docs/reference/texttospeech/latest", + "default_version": "v1", + "distribution_name": "google-cloud-texttospeech", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5235428", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "texttospeech", + "name_pretty": "Google Cloud Text-to-Speech", + "product_documentation": "https://cloud.google.com/text-to-speech", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-tpu/.repo-metadata.json b/packages/google-cloud-tpu/.repo-metadata.json index 349227ffcb2a..82c0864f024b 100644 --- a/packages/google-cloud-tpu/.repo-metadata.json +++ b/packages/google-cloud-tpu/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "Cloud Tensor Processing Units 
(TPUs) are Google's custom-developed application-specific integrated circuits (ASICs) used to accelerate machine learning workloads.", - "api_id": "tpu.googleapis.com", - "api_shortname": "tpu", - "client_documentation": "https://cloud.google.com/python/docs/reference/tpu/latest", - "default_version": "v1", - "distribution_name": "google-cloud-tpu", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "tpu", - "name_pretty": "Cloud TPU", - "product_documentation": "https://cloud.google.com/tpu/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Cloud Tensor Processing Units (TPUs) are Google's custom-developed application-specific integrated circuits (ASICs) used to accelerate machine learning workloads.", + "api_id": "tpu.googleapis.com", + "api_shortname": "tpu", + "client_documentation": "https://cloud.google.com/python/docs/reference/tpu/latest", + "default_version": "v1", + "distribution_name": "google-cloud-tpu", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "tpu", + "name_pretty": "Cloud TPU", + "product_documentation": "https://cloud.google.com/tpu/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-trace/.repo-metadata.json b/packages/google-cloud-trace/.repo-metadata.json index 26e7fea13ad2..3484c6f2df80 100644 --- a/packages/google-cloud-trace/.repo-metadata.json +++ b/packages/google-cloud-trace/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "is a distributed tracing system that collects latency data from your applications and displays it in the Google Cloud Platform Console. 
You can track how requests propagate through your application and receive detailed near real-time performance insights.", - "api_id": "cloudtrace.googleapis.com", - "api_shortname": "cloudtrace", - "client_documentation": "https://cloud.google.com/python/docs/reference/cloudtrace/latest", - "default_version": "v2", - "distribution_name": "google-cloud-trace", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559776", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "cloudtrace", - "name_pretty": "Cloud Trace", - "product_documentation": "https://cloud.google.com/trace/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "is a distributed tracing system that collects latency data from your applications and displays it in the Google Cloud Platform Console. You can track how requests propagate through your application and receive detailed near real-time performance insights.", + "api_id": "cloudtrace.googleapis.com", + "api_shortname": "cloudtrace", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudtrace/latest", + "default_version": "v2", + "distribution_name": "google-cloud-trace", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559776", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "cloudtrace", + "name_pretty": "Cloud Trace", + "product_documentation": "https://cloud.google.com/trace/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-translate/.repo-metadata.json b/packages/google-cloud-translate/.repo-metadata.json index 0e84e5654cee..b9c4bea819c6 100644 --- a/packages/google-cloud-translate/.repo-metadata.json +++ b/packages/google-cloud-translate/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "can dynamically translate text between thousands of language pairs. 
Translation lets websites and programs programmatically integrate with the translation service.", - "api_id": "translate.googleapis.com", - "api_shortname": "translate", - "client_documentation": "https://cloud.google.com/python/docs/reference/translate/latest", - "default_version": "v3", - "distribution_name": "google-cloud-translate", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559749", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "translate", - "name_pretty": "Cloud Translation", - "product_documentation": "https://cloud.google.com/translate/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "can dynamically translate text between thousands of language pairs. Translation lets websites and programs programmatically integrate with the translation service.", + "api_id": "translate.googleapis.com", + "api_shortname": "translate", + "client_documentation": "https://cloud.google.com/python/docs/reference/translate/latest", + "default_version": "v3", + "distribution_name": "google-cloud-translate", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559749", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "translate", + "name_pretty": "Cloud Translation", + "product_documentation": "https://cloud.google.com/translate/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py index d1ffc1cd470f..d849156745ee 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3/types/adaptive_mt.py @@ -252,6 +252,8 @@ class AdaptiveMtTranslateRequest(proto.Message): content (MutableSequence[str]): Required. 
The content of the input in string format. + mime_type (str): + The format of the source text. reference_sentence_config (google.cloud.translate_v3.types.AdaptiveMtTranslateRequest.ReferenceSentenceConfig): Configuration for caller provided reference sentences. @@ -382,6 +384,10 @@ class GlossaryConfig(proto.Message): proto.STRING, number=3, ) + mime_type: str = proto.Field( + proto.STRING, + number=4, + ) reference_sentence_config: ReferenceSentenceConfig = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py index 82e3d8518e63..0f5530ca5d6c 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3/test_translation_service.py @@ -10742,6 +10742,7 @@ def test_adaptive_mt_translate_non_empty_request_with_auto_populated_field(): request = adaptive_mt.AdaptiveMtTranslateRequest( parent="parent_value", dataset="dataset_value", + mime_type="mime_type_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10757,6 +10758,7 @@ def test_adaptive_mt_translate_non_empty_request_with_auto_populated_field(): assert args[0] == adaptive_mt.AdaptiveMtTranslateRequest( parent="parent_value", dataset="dataset_value", + mime_type="mime_type_value", ) diff --git a/packages/google-cloud-vectorsearch/.repo-metadata.json b/packages/google-cloud-vectorsearch/.repo-metadata.json index 8482c4805565..131ffd9899ea 100644 --- a/packages/google-cloud-vectorsearch/.repo-metadata.json +++ b/packages/google-cloud-vectorsearch/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Vector Search API provides a fully-managed, highly performant, and\nscalable vector database designed to power next-generation search,\nrecommendation, and generative AI applications. 
It allows you to store,\nindex, and query your data and its corresponding vector embeddings through\na simple, intuitive interface. With Vector Search, you can define custom\nschemas for your data, insert objects with associated metadata,\nautomatically generate embeddings from your data, and perform fast\napproximate nearest neighbor (ANN) searches to find semantically similar\nitems at scale.", - "api_id": "vectorsearch.googleapis.com", - "api_shortname": "vectorsearch", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-vectorsearch/latest", - "default_version": "v1", - "distribution_name": "google-cloud-vectorsearch", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1899904", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-vectorsearch", - "name_pretty": "Vector Search API", - "product_documentation": "https://docs.cloud.google.com/vertex-ai/docs/vector-search-2/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "The Vector Search API provides a fully-managed, highly performant, and\nscalable vector database designed to power next-generation search,\nrecommendation, and generative AI applications. It allows you to store,\nindex, and query your data and its corresponding vector embeddings through\na simple, intuitive interface. 
With Vector Search, you can define custom\nschemas for your data, insert objects with associated metadata,\nautomatically generate embeddings from your data, and perform fast\napproximate nearest neighbor (ANN) searches to find semantically similar\nitems at scale.", + "api_id": "vectorsearch.googleapis.com", + "api_shortname": "vectorsearch", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-vectorsearch/latest", + "default_version": "v1", + "distribution_name": "google-cloud-vectorsearch", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1899904", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-vectorsearch", + "name_pretty": "Vector Search API", + "product_documentation": "https://docs.cloud.google.com/vertex-ai/docs/vector-search-2/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-video-live-stream/.repo-metadata.json b/packages/google-cloud-video-live-stream/.repo-metadata.json index 99e2382c609d..a4cd03f3a84a 100644 --- a/packages/google-cloud-video-live-stream/.repo-metadata.json +++ b/packages/google-cloud-video-live-stream/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "transcodes mezzanine live signals into direct-to-consumer streaming formats, including Dynamic Adaptive Streaming over HTTP (DASH/MPEG-DASH), and HTTP Live Streaming (HLS), for multiple device platforms.", - "api_id": "livestream.googleapis.com", - "api_shortname": "livestream", - "client_documentation": "https://cloud.google.com/python/docs/reference/livestream/latest", - "default_version": "v1", - "distribution_name": "google-cloud-video-live-stream", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "livestream", - "name_pretty": "Live Stream", - "product_documentation": "https://cloud.google.com/livestream/docs", - "release_level": "stable", - 
"repo": "googleapis/google-cloud-python" + "api_description": "transcodes mezzanine live signals into direct-to-consumer streaming formats, including Dynamic Adaptive Streaming over HTTP (DASH/MPEG-DASH), and HTTP Live Streaming (HLS), for multiple device platforms.", + "api_id": "livestream.googleapis.com", + "api_shortname": "livestream", + "client_documentation": "https://cloud.google.com/python/docs/reference/livestream/latest", + "default_version": "v1", + "distribution_name": "google-cloud-video-live-stream", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "livestream", + "name_pretty": "Live Stream", + "product_documentation": "https://cloud.google.com/livestream/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-video-stitcher/.repo-metadata.json b/packages/google-cloud-video-stitcher/.repo-metadata.json index 8daa41f02366..d134b753cca6 100644 --- a/packages/google-cloud-video-stitcher/.repo-metadata.json +++ b/packages/google-cloud-video-stitcher/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "The Video Stitcher API helps you generate dynamic content for delivery to client devices. 
You can call the Video Stitcher API from your servers to dynamically insert ads into video-on-demand and livestreams for your users.", - "api_id": "videostitcher.googleapis.com", - "api_shortname": "videostitcher", - "client_documentation": "https://cloud.google.com/python/docs/reference/videostitcher/latest", - "default_version": "v1", - "distribution_name": "google-cloud-video-stitcher", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "videostitcher", - "name_pretty": "Video Stitcher", - "product_documentation": "https://cloud.google.com/video-stitcher", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Video Stitcher API helps you generate dynamic content for delivery to client devices. You can call the Video Stitcher API from your servers to dynamically insert ads into video-on-demand and livestreams for your users.", + "api_id": "videostitcher.googleapis.com", + "api_shortname": "videostitcher", + "client_documentation": "https://cloud.google.com/python/docs/reference/videostitcher/latest", + "default_version": "v1", + "distribution_name": "google-cloud-video-stitcher", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "videostitcher", + "name_pretty": "Video Stitcher", + "product_documentation": "https://cloud.google.com/video-stitcher", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-video-transcoder/.repo-metadata.json b/packages/google-cloud-video-transcoder/.repo-metadata.json index e9214ff8fa53..dd3c658150f1 100644 --- a/packages/google-cloud-video-transcoder/.repo-metadata.json +++ b/packages/google-cloud-video-transcoder/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "allows you to transcode videos into a variety of formats. 
The Transcoder API benefits broadcasters, production companies, businesses, and individuals looking to transform their video content for use across a variety of user devices.", - "api_id": "transcoder.googleapis.com", - "api_shortname": "transcoder", - "client_documentation": "https://cloud.google.com/python/docs/reference/transcoder/latest", - "default_version": "v1", - "distribution_name": "google-cloud-video-transcoder", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "transcoder", - "name_pretty": "Transcoder", - "product_documentation": "https://cloud.google.com/transcoder", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "allows you to transcode videos into a variety of formats. The Transcoder API benefits broadcasters, production companies, businesses, and individuals looking to transform their video content for use across a variety of user devices.", + "api_id": "transcoder.googleapis.com", + "api_shortname": "transcoder", + "client_documentation": "https://cloud.google.com/python/docs/reference/transcoder/latest", + "default_version": "v1", + "distribution_name": "google-cloud-video-transcoder", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "transcoder", + "name_pretty": "Transcoder", + "product_documentation": "https://cloud.google.com/transcoder", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-videointelligence/.repo-metadata.json b/packages/google-cloud-videointelligence/.repo-metadata.json index 5e6b7aa831e8..05dcb3213efe 100644 --- a/packages/google-cloud-videointelligence/.repo-metadata.json +++ b/packages/google-cloud-videointelligence/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "makes videos searchable, and discoverable, by extracting metadata with an easy to use API. 
You can now search every moment of every video file in your catalog and find every occurrence as well as its significance. It quickly annotates videos stored in Google Cloud Storage, and helps you identify key nouns entities of your video, and when they occur within the video. Separate signal from noise, by retrieving relevant information at the video, shot or per frame.", - "api_id": "videointelligence.googleapis.com", - "api_shortname": "videointelligence", - "client_documentation": "https://cloud.google.com/python/docs/reference/videointelligence/latest", - "default_version": "v1", - "distribution_name": "google-cloud-videointelligence", - "issue_tracker": "https://issuetracker.google.com/savedsearches/5084810", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "videointelligence", - "name_pretty": "Video Intelligence", - "product_documentation": "https://cloud.google.com/video-intelligence/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "makes videos searchable, and discoverable, by extracting metadata with an easy to use API. You can now search every moment of every video file in your catalog and find every occurrence as well as its significance. It quickly annotates videos stored in Google Cloud Storage, and helps you identify key nouns entities of your video, and when they occur within the video. 
Separate signal from noise, by retrieving relevant information at the video, shot or per frame.", + "api_id": "videointelligence.googleapis.com", + "api_shortname": "videointelligence", + "client_documentation": "https://cloud.google.com/python/docs/reference/videointelligence/latest", + "default_version": "v1", + "distribution_name": "google-cloud-videointelligence", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5084810", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "videointelligence", + "name_pretty": "Video Intelligence", + "product_documentation": "https://cloud.google.com/video-intelligence/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-vision/.repo-metadata.json b/packages/google-cloud-vision/.repo-metadata.json index 4ca2e400a56f..255b0b251950 100644 --- a/packages/google-cloud-vision/.repo-metadata.json +++ b/packages/google-cloud-vision/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "allows developers to easily integrate vision detection features within applications, including image labeling, face and landmark detection, optical character recognition (OCR), and tagging of explicit content.", - "api_id": "vision.googleapis.com", - "api_shortname": "vision", - "client_documentation": "https://cloud.google.com/python/docs/reference/vision/latest", - "default_version": "v1", - "distribution_name": "google-cloud-vision", - "issue_tracker": "https://issuetracker.google.com/issues?q=status:open%20componentid:187174", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "vision", - "name_pretty": "Cloud Vision", - "product_documentation": "https://cloud.google.com/vision/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "allows developers to easily integrate vision detection features within applications, including image 
labeling, face and landmark detection, optical character recognition (OCR), and tagging of explicit content.", + "api_id": "vision.googleapis.com", + "api_shortname": "vision", + "client_documentation": "https://cloud.google.com/python/docs/reference/vision/latest", + "default_version": "v1", + "distribution_name": "google-cloud-vision", + "issue_tracker": "https://issuetracker.google.com/issues?q=status:open%20componentid:187174", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "vision", + "name_pretty": "Cloud Vision", + "product_documentation": "https://cloud.google.com/vision/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-vision/google/cloud/vision/__init__.py b/packages/google-cloud-vision/google/cloud/vision/__init__.py index 271a36da1815..27def75b91c6 100644 --- a/packages/google-cloud-vision/google/cloud/vision/__init__.py +++ b/packages/google-cloud-vision/google/cloud/vision/__init__.py @@ -18,10 +18,10 @@ __version__ = package_version.__version__ +from google.cloud.vision_v1 import ImageAnnotatorClient from google.cloud.vision_v1.services.image_annotator.async_client import ( ImageAnnotatorAsyncClient, ) -from google.cloud.vision_v1 import ImageAnnotatorClient from google.cloud.vision_v1.services.product_search.async_client import ( ProductSearchAsyncClient, ) diff --git a/packages/google-cloud-visionai/.repo-metadata.json b/packages/google-cloud-visionai/.repo-metadata.json index 794d5c7c5fc8..bab5fca5f7fb 100644 --- a/packages/google-cloud-visionai/.repo-metadata.json +++ b/packages/google-cloud-visionai/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Easily build and deploy Vertex AI Vision applications using a single platform.", - "api_id": "visionai.googleapis.com", - "api_shortname": "visionai", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-visionai/latest", - "default_version": 
"v1", - "distribution_name": "google-cloud-visionai", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=187174&pli=1&template=1161261", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-visionai", - "name_pretty": "Vision AI API", - "product_documentation": "https://cloud.google.com/vision-ai/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Easily build and deploy Vertex AI Vision applications using a single platform.", + "api_id": "visionai.googleapis.com", + "api_shortname": "visionai", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-visionai/latest", + "default_version": "v1", + "distribution_name": "google-cloud-visionai", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=187174\u0026pli=1\u0026template=1161261", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-visionai", + "name_pretty": "Vision AI API", + "product_documentation": "https://cloud.google.com/vision-ai/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-vm-migration/.repo-metadata.json b/packages/google-cloud-vm-migration/.repo-metadata.json index f0edb7b45e28..7cd912a7c1fa 100644 --- a/packages/google-cloud-vm-migration/.repo-metadata.json +++ b/packages/google-cloud-vm-migration/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": " for Compute Engine migrates VMs from your on-premises data center into Compute Engine.", - "api_id": "vmmigration.googleapis.com", - "api_shortname": "vmmigration", - "client_documentation": "https://cloud.google.com/python/docs/reference/vmmigration/latest", - "default_version": "v1", - "distribution_name": "google-cloud-vm-migration", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "vmmigration", - "name_pretty": "Cloud VM 
Migration", - "product_documentation": "https://cloud.google.com/migrate/compute-engine/docs", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": " for Compute Engine migrates VMs from your on-premises data center into Compute Engine.", + "api_id": "vmmigration.googleapis.com", + "api_shortname": "vmmigration", + "client_documentation": "https://cloud.google.com/python/docs/reference/vmmigration/latest", + "default_version": "v1", + "distribution_name": "google-cloud-vm-migration", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "vmmigration", + "name_pretty": "Cloud VM Migration", + "product_documentation": "https://cloud.google.com/migrate/compute-engine/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-vmwareengine/.repo-metadata.json b/packages/google-cloud-vmwareengine/.repo-metadata.json index a1f8f2f4ad61..2d2bcee232fc 100644 --- a/packages/google-cloud-vmwareengine/.repo-metadata.json +++ b/packages/google-cloud-vmwareengine/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "vmwareengine.googleapis.com", - "api_shortname": "vmwareengine", - "client_documentation": "https://cloud.google.com/python/docs/reference/vmwareengine/latest", - "default_version": "v1", - "distribution_name": "google-cloud-vmwareengine", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "vmwareengine", - "name_pretty": "Google Cloud VMware Engine", - "product_documentation": "https://cloud.google.com/vmware-engine/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google VMware Engine API lets you programmatically manage VMware\nenvironments.", + "api_id": "vmwareengine.googleapis.com", + "api_shortname": "vmwareengine", + "client_documentation": 
"https://cloud.google.com/python/docs/reference/vmwareengine/latest", + "default_version": "v1", + "distribution_name": "google-cloud-vmwareengine", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "vmwareengine", + "name_pretty": "Google Cloud VMware Engine", + "product_documentation": "https://cloud.google.com/vmware-engine/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-vmwareengine/README.rst b/packages/google-cloud-vmwareengine/README.rst index 5f6b30a4d861..a9d8e8e2afde 100644 --- a/packages/google-cloud-vmwareengine/README.rst +++ b/packages/google-cloud-vmwareengine/README.rst @@ -3,7 +3,8 @@ Python Client for Google Cloud VMware Engine |preview| |pypi| |versions| -`Google Cloud VMware Engine`_: +`Google Cloud VMware Engine`_: The Google VMware Engine API lets you programmatically manage VMware +environments. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-vmwareengine/docs/README.rst b/packages/google-cloud-vmwareengine/docs/README.rst index 5f6b30a4d861..a9d8e8e2afde 100644 --- a/packages/google-cloud-vmwareengine/docs/README.rst +++ b/packages/google-cloud-vmwareengine/docs/README.rst @@ -3,7 +3,8 @@ Python Client for Google Cloud VMware Engine |preview| |pypi| |versions| -`Google Cloud VMware Engine`_: +`Google Cloud VMware Engine`_: The Google VMware Engine API lets you programmatically manage VMware +environments. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-vpc-access/.repo-metadata.json b/packages/google-cloud-vpc-access/.repo-metadata.json index 74277930028d..b0a73974a3b6 100644 --- a/packages/google-cloud-vpc-access/.repo-metadata.json +++ b/packages/google-cloud-vpc-access/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "provides networking functionality to Compute Engine virtual machine (VM) instances, Google Kubernetes Engine (GKE) containers, and the App Engine flexible environment. VPC provides networking for your cloud-based services that is global, scalable, and flexible.", - "api_id": "vpcaccess.googleapis.com", - "api_shortname": "vpcaccess", - "client_documentation": "https://cloud.google.com/python/docs/reference/vpcaccess/latest", - "default_version": "v1", - "distribution_name": "google-cloud-vpc-access", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "vpcaccess", - "name_pretty": "Virtual Private Cloud", - "product_documentation": "https://cloud.google.com/vpc/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "provides networking functionality to Compute Engine virtual machine (VM) instances, Google Kubernetes Engine (GKE) containers, and the App Engine flexible environment. 
VPC provides networking for your cloud-based services that is global, scalable, and flexible.", + "api_id": "vpcaccess.googleapis.com", + "api_shortname": "vpcaccess", + "client_documentation": "https://cloud.google.com/python/docs/reference/vpcaccess/latest", + "default_version": "v1", + "distribution_name": "google-cloud-vpc-access", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "vpcaccess", + "name_pretty": "Virtual Private Cloud", + "product_documentation": "https://cloud.google.com/vpc/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-webrisk/.repo-metadata.json b/packages/google-cloud-webrisk/.repo-metadata.json index d9558c5a393a..5dee903e9f94 100644 --- a/packages/google-cloud-webrisk/.repo-metadata.json +++ b/packages/google-cloud-webrisk/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "is a Google Cloud service that lets client applications check URLs against Google's constantly updated lists of unsafe web resources. Unsafe web resources include social engineering sites—such as phishing and deceptive sites—and sites that host malware or unwanted software. With the Web Risk API, you can quickly identify known bad sites, warn users before they click infected links, and prevent users from posting links to known infected pages from your site. 
The Web Risk API includes data on more than a million unsafe URLs and stays up to date by examining billions of URLs each day.", - "api_id": "webrisk.googleapis.com", - "api_shortname": "webrisk", - "client_documentation": "https://cloud.google.com/python/docs/reference/webrisk/latest", - "default_version": "v1", - "distribution_name": "google-cloud-webrisk", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "webrisk", - "name_pretty": "Web Risk", - "product_documentation": "https://cloud.google.com/web-risk/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "is a Google Cloud service that lets client applications check URLs against Google's constantly updated lists of unsafe web resources. Unsafe web resources include social engineering sites—such as phishing and deceptive sites—and sites that host malware or unwanted software. With the Web Risk API, you can quickly identify known bad sites, warn users before they click infected links, and prevent users from posting links to known infected pages from your site. 
The Web Risk API includes data on more than a million unsafe URLs and stays up to date by examining billions of URLs each day.", + "api_id": "webrisk.googleapis.com", + "api_shortname": "webrisk", + "client_documentation": "https://cloud.google.com/python/docs/reference/webrisk/latest", + "default_version": "v1", + "distribution_name": "google-cloud-webrisk", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "webrisk", + "name_pretty": "Web Risk", + "product_documentation": "https://cloud.google.com/web-risk/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-websecurityscanner/.repo-metadata.json b/packages/google-cloud-websecurityscanner/.repo-metadata.json index 10f7445633b0..82966e22f1d0 100644 --- a/packages/google-cloud-websecurityscanner/.repo-metadata.json +++ b/packages/google-cloud-websecurityscanner/.repo-metadata.json @@ -1,17 +1,16 @@ { - "api_description": "identifies security vulnerabilities in your App Engine, Compute Engine, and Google Kubernetes Engine web applications. 
It crawls your application, following all links within the scope of your starting URLs, and attempts to exercise as many user inputs and event handlers as possible.", - "api_id": "securitycenter.googleapis.com", - "api_shortname": "securitycenter", - "client_documentation": "https://cloud.google.com/python/docs/reference/websecurityscanner/latest", - "default_version": "v1", - "distribution_name": "google-cloud-websecurityscanner", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559748", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "websecurityscanner", - "name_pretty": "Cloud Security Scanner", - "product_documentation": "https://cloud.google.com/security-scanner/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": true + "api_description": "identifies security vulnerabilities in your App Engine, Compute Engine, and Google Kubernetes Engine web applications. It crawls your application, following all links within the scope of your starting URLs, and attempts to exercise as many user inputs and event handlers as possible.", + "api_id": "securitycenter.googleapis.com", + "api_shortname": "securitycenter", + "client_documentation": "https://cloud.google.com/python/docs/reference/websecurityscanner/latest", + "default_version": "v1", + "distribution_name": "google-cloud-websecurityscanner", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559748", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "websecurityscanner", + "name_pretty": "Cloud Security Scanner", + "product_documentation": "https://cloud.google.com/security-scanner/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-workflows/.repo-metadata.json b/packages/google-cloud-workflows/.repo-metadata.json index cf45a12f3c96..31863aaa98f6 100644 --- a/packages/google-cloud-workflows/.repo-metadata.json 
+++ b/packages/google-cloud-workflows/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Orchestrate and automate Google Cloud and HTTP-based API services with serverless workflows.", - "api_id": "workflows.googleapis.com", - "api_shortname": "workflows", - "client_documentation": "https://cloud.google.com/python/docs/reference/workflows/latest", - "default_version": "v1", - "distribution_name": "google-cloud-workflows", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559729", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "workflows", - "name_pretty": "Cloud Workflows", - "product_documentation": "https://cloud.google.com/workflows/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Orchestrate and automate Google Cloud and HTTP-based API services with serverless workflows.", + "api_id": "workflows.googleapis.com", + "api_shortname": "workflows", + "client_documentation": "https://cloud.google.com/python/docs/reference/workflows/latest", + "default_version": "v1", + "distribution_name": "google-cloud-workflows", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559729", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "workflows", + "name_pretty": "Cloud Workflows", + "product_documentation": "https://cloud.google.com/workflows/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-workloadmanager/.repo-metadata.json b/packages/google-cloud-workloadmanager/.repo-metadata.json index 3697c5ad06f4..b17c40660536 100644 --- a/packages/google-cloud-workloadmanager/.repo-metadata.json +++ b/packages/google-cloud-workloadmanager/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Workload Manager is a service that provides tooling for enterprise\nworkloads to automate the deployment and validation of your workloads\nagainst best practices and 
recommendations.", - "api_id": "workloadmanager.googleapis.com", - "api_shortname": "workloadmanager", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-workloadmanager/latest", - "default_version": "v1", - "distribution_name": "google-cloud-workloadmanager", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1631482&template=0", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-cloud-workloadmanager", - "name_pretty": "Workload Manager API", - "product_documentation": "https://docs.cloud.google.com/workload-manager/docs", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "Workload Manager is a service that provides tooling for enterprise\nworkloads to automate the deployment and validation of your workloads\nagainst best practices and recommendations.", + "api_id": "workloadmanager.googleapis.com", + "api_shortname": "workloadmanager", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-workloadmanager/latest", + "default_version": "v1", + "distribution_name": "google-cloud-workloadmanager", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1631482\u0026template=0", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-cloud-workloadmanager", + "name_pretty": "Workload Manager API", + "product_documentation": "https://docs.cloud.google.com/workload-manager/docs", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-cloud-workstations/.repo-metadata.json b/packages/google-cloud-workstations/.repo-metadata.json index 2b8a0b16066f..e7dae459e478 100644 --- a/packages/google-cloud-workstations/.repo-metadata.json +++ b/packages/google-cloud-workstations/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "workstations.googleapis.com", - "api_shortname": 
"workstations", - "client_documentation": "https://cloud.google.com/python/docs/reference/workstations/latest", - "default_version": "v1", - "distribution_name": "google-cloud-workstations", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "workstations", - "name_pretty": "Cloud Workstations", - "product_documentation": "https://cloud.google.com/workstations/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Allows administrators to create managed developer environments in the cloud.", + "api_id": "workstations.googleapis.com", + "api_shortname": "workstations", + "client_documentation": "https://cloud.google.com/python/docs/reference/workstations/latest", + "default_version": "v1", + "distribution_name": "google-cloud-workstations", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "workstations", + "name_pretty": "Cloud Workstations", + "product_documentation": "https://cloud.google.com/workstations/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-cloud-workstations/README.rst b/packages/google-cloud-workstations/README.rst index fa32bd04f737..69d61d9f341c 100644 --- a/packages/google-cloud-workstations/README.rst +++ b/packages/google-cloud-workstations/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Workstations |preview| |pypi| |versions| -`Cloud Workstations`_: +`Cloud Workstations`_: Allows administrators to create managed developer environments in the cloud. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-cloud-workstations/docs/README.rst b/packages/google-cloud-workstations/docs/README.rst index fa32bd04f737..69d61d9f341c 100644 --- a/packages/google-cloud-workstations/docs/README.rst +++ b/packages/google-cloud-workstations/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Workstations |preview| |pypi| |versions| -`Cloud Workstations`_: +`Cloud Workstations`_: Allows administrators to create managed developer environments in the cloud. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-crc32c/.repo-metadata.json b/packages/google-crc32c/.repo-metadata.json index f853e7b029b4..aaf9133c5b40 100644 --- a/packages/google-crc32c/.repo-metadata.json +++ b/packages/google-crc32c/.repo-metadata.json @@ -1,14 +1,11 @@ { - "name": "google-crc32c", - "name_pretty": "A python wrapper of the C library 'Google CRC32C'", - "product_documentation": "", "client_documentation": "https://github.com/googleapis/python-crc32c", + "distribution_name": "google-crc32c", "issue_tracker": "https://github.com/googleapis/python-crc32c/issues", - "release_level": "stable", "language": "python", "library_type": "OTHER", - "repo": "googleapis/google-cloud-python", - "distribution_name": "google-crc32c", - "default_version": "", - "codeowner_team": "" -} + "name": "google-crc32c", + "name_pretty": "A python wrapper of the C library 'Google CRC32C'", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-crc32c/scripts/check_crc32c_extension.py b/packages/google-crc32c/scripts/check_crc32c_extension.py deleted file mode 100644 index dc4a85aa349f..000000000000 --- a/packages/google-crc32c/scripts/check_crc32c_extension.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from google_crc32c import _crc32c - - -def main(): - print("_crc32c: {}".format(_crc32c)) - print("dir(_crc32c): {}".format(dir(_crc32c))) - - -if __name__ == "__main__": - main() diff --git a/packages/google-crc32c/scripts/dev-requirements.txt b/packages/google-crc32c/scripts/dev-requirements.txt deleted file mode 100644 index 7479544b333a..000000000000 --- a/packages/google-crc32c/scripts/dev-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -auditwheel >= 2.1.1; sys_platform == 'linux' or sys_platform == 'linux2' -delocate >= 0.8.0; sys_platform == 'darwin' -setuptools >= 42.0.2 -# See: https://github.com/pypa/auditwheel/issues/102 -wheel >= 0.34 diff --git a/packages/google-crc32c/scripts/local-linux/build_libcrc32c.sh b/packages/google-crc32c/scripts/local-linux/build_libcrc32c.sh deleted file mode 100755 index 705f24771903..000000000000 --- a/packages/google-crc32c/scripts/local-linux/build_libcrc32c.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -e -x - -PY_BIN=${PY_BIN:-python3.9} -REPO_ROOT=${REPO_ROOT:-$(pwd)} - -CRC32C_INSTALL_PREFIX=${REPO_ROOT}/usr - -# NOTE: This assumes the local install has an up-to-date `pip`. -# Create a virtualenv where we can install `cmake`. -VENV=${REPO_ROOT}/venv -${PY_BIN} -m venv ${VENV} -${VENV}/bin/python -m pip install --upgrade setuptools pip wheel -${VENV}/bin/python -m pip install "cmake >= 3.12.0" -rm -rf ${REPO_ROOT}/build -rm -rf ${CRC32C_INSTALL_PREFIX} -# Build `libcrc32c` -cd ${REPO_ROOT}/google_crc32c -rm -rf build -mkdir build -cd build/ -${VENV}/bin/cmake \ - -DCMAKE_BUILD_TYPE=Release \ - -DCRC32C_BUILD_TESTS=no \ - -DCRC32C_BUILD_BENCHMARKS=no \ - -DBUILD_SHARED_LIBS=yes \ - -DCMAKE_INSTALL_PREFIX:PATH=${CRC32C_INSTALL_PREFIX} \ - .. -# Install `libcrc32c` into CRC32C_INSTALL_PREFIX. -make all install - -cd ${REPO_ROOT} - -${VENV}/bin/python setup.py build_ext \ - --include-dirs=${REPO_ROOT}/usr/include \ - --library-dirs=${REPO_ROOT}/usr/lib \ - --rpath=${REPO_ROOT}/usr/lib -${VENV}/bin/python -m pip wheel . --wheel-dir=wheels - -# Clean up. -rm -fr ${REPO_ROOT}/google_crc32c/build -rm -fr ${VENV} diff --git a/packages/google-crc32c/scripts/manylinux/build.sh b/packages/google-crc32c/scripts/manylinux/build.sh deleted file mode 100755 index 484e2a4160c8..000000000000 --- a/packages/google-crc32c/scripts/manylinux/build.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -e -x -echo "BUILDING ON LINUX" -export BUILD_PYTHON=${BUILD_PYTHON} - -MANYLINUX_DIR=$(echo $(cd $(dirname ${0}); pwd)) -SCRIPTS_DIR=$(dirname ${MANYLINUX_DIR}) -REPO_ROOT=$(dirname ${SCRIPTS_DIR}) - -sudo apt-get install -y software-properties-common -sudo add-apt-repository -y ppa:deadsnakes/ppa -sudo apt-get update -sudo apt-get install -y python3.12 - -cd $REPO_ROOT -# Add directory as safe to avoid "detected dubious ownership" fatal issue1 -git config --global --add safe.directory '*' -git submodule update --init --recursive - - - -docker pull quay.io/pypa/manylinux2014_x86_64 -docker run \ - --rm \ - --interactive \ - --volume ${REPO_ROOT}:/var/code/python-crc32c/ \ - --env BUILD_PYTHON=${BUILD_PYTHON} \ - quay.io/pypa/manylinux2014_x86_64 \ - /var/code/python-crc32c/scripts/manylinux/build_on_centos.sh - -docker run --rm --privileged hypriot/qemu-register -docker pull quay.io/pypa/manylinux2014_aarch64 -docker run \ - --rm \ - --interactive \ - --volume ${REPO_ROOT}:/var/code/python-crc32c/ \ - --env BUILD_PYTHON=${BUILD_PYTHON} \ - quay.io/pypa/manylinux2014_aarch64 \ - /var/code/python-crc32c/scripts/manylinux/build_on_centos.sh - -if [[ "${PUBLISH_WHEELS}" == "true" ]]; then - . /${MANYLINUX_DIR}/publish_python_wheel.sh -fi diff --git a/packages/google-crc32c/scripts/manylinux/build_on_centos.sh b/packages/google-crc32c/scripts/manylinux/build_on_centos.sh deleted file mode 100755 index 9507dda09262..000000000000 --- a/packages/google-crc32c/scripts/manylinux/build_on_centos.sh +++ /dev/null @@ -1,119 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -e -x -MAIN_PYTHON_BIN="/opt/python/cp39-cp39/bin/" -echo "BUILD_PYTHON: ${BUILD_PYTHON}" -REPO_ROOT=/var/code/python-crc32c/ - -# Install `openssl-devel` so that `cmake` can be built. -yum install -y openssl-devel - -# Upgrade `pip` before using it. -${MAIN_PYTHON_BIN}/python -m pip install --upgrade pip -# Install `cmake` (i.e. non-Python build dependency). -${MAIN_PYTHON_BIN}/python -m pip install "cmake >= 3.12.0" -# Install Python build dependencies. -${MAIN_PYTHON_BIN}/python -m pip install \ - --requirement ${REPO_ROOT}/scripts/dev-requirements.txt - -# Build and install `crc32c` -cd ${REPO_ROOT}/google_crc32c/ -rm -rf build -mkdir build -cd build/ -cmake \ - -DCMAKE_BUILD_TYPE=Release \ - -DCRC32C_BUILD_TESTS=no \ - -DCRC32C_BUILD_BENCHMARKS=no \ - -DBUILD_SHARED_LIBS=yes \ - -DCMAKE_POLICY_VERSION_MINIMUM=3.12 \ - .. -make all install - -PYTHON_VERSIONS="" -if [[ -z ${BUILD_PYTHON} ]]; then - # Collect all target Python versions. 
- for PYTHON_BIN in /opt/python/*/bin; do - # H/T: https://stackoverflow.com/a/229606/1068170 - if [[ "${PYTHON_BIN}" == *"39"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - elif [[ "${PYTHON_BIN}" == *"310"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - elif [[ "${PYTHON_BIN}" == *"311"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - elif [[ "${PYTHON_BIN}" == *"312"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - elif [[ "${PYTHON_BIN}" == *"313"* && "${PYTHON_BIN}" != *"313t"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - elif [[ "${PYTHON_BIN}" == *"314"* && "${PYTHON_BIN}" != *"314t"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - continue - else - echo "Ignoring unsupported version: ${PYTHON_BIN}" - echo "=====================================" - fi - done -else - STRIPPED_PYTHON=$(echo ${BUILD_PYTHON} | sed -e "s/\.//g" | sed -e "s/-dev$//") - for PYTHON_BIN in /opt/python/*/bin; do - if [[ "${PYTHON_BIN}" == *"${STRIPPED_PYTHON}"* ]]; then - PYTHON_VERSIONS="${PYTHON_VERSIONS} ${PYTHON_BIN}" - fi - done -fi - -# Build the wheels. -cd ${REPO_ROOT} -for PYTHON_BIN in ${PYTHON_VERSIONS}; do - ${PYTHON_BIN}/python -m pip install --upgrade pip - ${PYTHON_BIN}/python -m pip install \ - --requirement ${REPO_ROOT}/scripts/dev-requirements.txt - ${PYTHON_BIN}/python -m pip wheel . --wheel-dir dist_wheels/ -done - -# Bundle external shared libraries into the wheels -for whl in dist_wheels/google_crc32c*.whl; do - "${MAIN_PYTHON_BIN}/auditwheel" repair "${whl}" --wheel-dir wheels/ -done - -# Install and test wheels -for PYTHON_BIN in ${PYTHON_VERSIONS}; do - # Identify the short python version e.g. 
"39", "310" - # Get the ABI tag from the Python binary's path, e.g., "cp310-cp310" - ABI_TAG=$(basename $(dirname ${PYTHON_BIN})) - ARCH=$(uname -m) - # Create a virtual environment to install and test the wheel - ${PYTHON_BIN}/python -m venv /tmp/venv - - # Find the correct wheel file using the precise ABI tag and architecture. - WHEEL_FILE=$(ls ${REPO_ROOT}/wheels/google_crc32c-*-${ABI_TAG}-*manylinux*${ARCH}*.whl) # Install the wheel - /tmp/venv/bin/pip install "${WHEEL_FILE}" - - # Verify that the module is installed and peek at contents. - /tmp/venv/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py - - # Clean up the virtual environment - rm -rf /tmp/venv -done - -# Clean up. -rm -fr ${REPO_ROOT}/google_crc32c/build/ -rm -fr ${REPO_ROOT}/dist_wheels/ diff --git a/packages/google-crc32c/scripts/manylinux/publish_python_wheel.sh b/packages/google-crc32c/scripts/manylinux/publish_python_wheel.sh deleted file mode 100755 index 7b5bdf6d3520..000000000000 --- a/packages/google-crc32c/scripts/manylinux/publish_python_wheel.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -eo pipefail - -python -m pip install "setuptools<71" - -# Start the releasetool reporter -python -m pip install --require-hashes -r ${REPO_ROOT}/.kokoro/requirements.txt -python -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -ls ${REPO_ROOT}/wheels/ -# Disable logging -set +x -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -python -m twine upload --skip-existing --username __token__ --password "${TWINE_PASSWORD}" ${REPO_ROOT}/wheels/* diff --git a/packages/google-crc32c/scripts/osx/build.sh b/packages/google-crc32c/scripts/osx/build.sh deleted file mode 100755 index 075ffcc26193..000000000000 --- a/packages/google-crc32c/scripts/osx/build.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -e -x -echo "BUILDING FOR OSX" - -# set deployment target -export MACOSX_DEPLOYMENT_TARGET=12 - -# ``readlink -f`` is not our friend on OS X. This relies on **some** -# ``python`` being installed. 
-SCRIPT_FI=$(python3 -c "import os; print(os.path.realpath('${0}'))") -OSX_DIR=$(dirname ${SCRIPT_FI}) -SCRIPTS_DIR=$(dirname ${OSX_DIR}) -export REPO_ROOT=$(dirname ${SCRIPTS_DIR}) - -# Build and install `libcrc32c` -export PY_BIN="python3" -export CRC32C_INSTALL_PREFIX="${REPO_ROOT}/usr" - -cd ${REPO_ROOT} -# Add directory as safe to avoid "detected dubious ownership" fatal issue -git config --global --add safe.directory '*' -git submodule update --init --recursive - -${OSX_DIR}/build_c_lib.sh - -# reinstall pyenv -rm -rf /Users/kbuilder/.pyenv -git clone https://github.com/pyenv/pyenv.git /Users/kbuilder/.pyenv - -SUPPORTED_PYTHON_VERSIONS=("3.9" "3.10" "3.11" "3.12" "3.13" "3.14") - -for PYTHON_VERSION in ${SUPPORTED_PYTHON_VERSIONS[@]}; do - echo "Build wheel for Python ${PYTHON_VERSION}" - export PY_BIN=$PYTHON_VERSION - export PY_TAG="cp${PYTHON_VERSION//.}-cp${PYTHON_VERSION//.}" - . /${OSX_DIR}/build_python_wheel.sh -done - -# Clean up. -rm -fr ${CRC32C_INSTALL_PREFIX} diff --git a/packages/google-crc32c/scripts/osx/build_c_lib.sh b/packages/google-crc32c/scripts/osx/build_c_lib.sh deleted file mode 100755 index 4d9ae0e6543c..000000000000 --- a/packages/google-crc32c/scripts/osx/build_c_lib.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Build and install `libcrc32c` - -set -e -x - -# Check that the install prefix is set. Exit early if the -# directory already exists. 
-if [[ -z "${CRC32C_INSTALL_PREFIX}" ]]; then - echo "CRC32C_INSTALL_PREFIX environment variable should be set by the caller." - exit 1 -fi -if [[ -d "${CRC32C_INSTALL_PREFIX}" ]]; then - echo "CRC32C_INSTALL_PREFIX=${CRC32C_INSTALL_PREFIX} already exists." - exit 0 -fi - -# Check that the REPO_ROOT and PY_BIN environment variables are set. -if [[ -z "${REPO_ROOT}" ]]; then - echo "REPO_ROOT environment variable should be set by the caller." - exit 1 -fi - -if [[ -z "${PY_BIN}" ]]; then - echo "PY_BIN environment variable should be set by the caller." - exit 1 -fi - -# Precreate install directories for crc32 lib. -mkdir -p ${CRC32C_INSTALL_PREFIX} -mkdir -p ${CRC32C_INSTALL_PREFIX}/lib - -# Create a virtualenv where we can install `cmake`. -VENV=${REPO_ROOT}/venv_build_libcrc32c -${PY_BIN} -m venv ${VENV} -${VENV}/bin/python -m pip install --upgrade pip -${VENV}/bin/python -m pip install "cmake >= 3.12.0" - -# Build `libcrc32c` -cd ${REPO_ROOT}/google_crc32c -mkdir -p build -cd build -ls - -# We don't build i386 anymore as XCode no longer supports. -${VENV}/bin/cmake \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9 \ - -DCMAKE_OSX_ARCHITECTURES="x86_64;arm64" \ - -DCRC32C_BUILD_TESTS=no \ - -DCRC32C_BUILD_BENCHMARKS=no \ - -DBUILD_SHARED_LIBS=yes \ - -DCMAKE_INSTALL_PREFIX:PATH=${CRC32C_INSTALL_PREFIX} \ - -DCMAKE_INSTALL_NAME_DIR:PATH=${CRC32C_INSTALL_PREFIX}/lib \ - -DCMAKE_POLICY_VERSION_MINIMUM=3.12 \ - .. - -# Install `libcrc32c` into CRC32C_INSTALL_PREFIX. -make all install - -# Clean up. -cd .. 
-rm -fr ${REPO_ROOT}/google_crc32c/build -rm -fr ${VENV} diff --git a/packages/google-crc32c/scripts/osx/build_python_wheel.sh b/packages/google-crc32c/scripts/osx/build_python_wheel.sh deleted file mode 100755 index 1695a1595eb1..000000000000 --- a/packages/google-crc32c/scripts/osx/build_python_wheel.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Build a **single** Python wheel for a specified version. The version and -# associated paths should be set as environment variables; the expected -# environment variables will be verified below. - -set -e -x - -# Check that the REPO_ROOT, PY_BIN and PY_TAG environment variables are set. -if [[ -z "${REPO_ROOT}" ]]; then - echo "REPO_ROOT environment variable should be set by the caller." - exit 1 -fi -if [[ -z "${PY_BIN}" ]]; then - echo "PY_BIN environment variable should be set by the caller." - exit 1 -fi -if [[ -z "${PY_TAG}" ]]; then - echo "PY_TAG environment variable should be set by the caller." - exit 1 -fi - -# set up pyenv & shell environment for switching across python versions -eval "$(pyenv init -)" -eval "$(pyenv init --path)" - -install_python_pyenv() { - version=$1 - - if [ -z "$(pyenv versions --bare | grep $version)" ]; then - echo "Python $version is not installed. Installing..." - pyenv install $version - echo "Python $version installed." - else - echo "Python $version is already installed." 
- fi - pyenv shell $version -} -install_python_pyenv ${PY_BIN} - - -# Rely on the REPO_ROOT already provided by the parent script -OSX_DIR="${REPO_ROOT}/scripts/osx" - -# Create a virtualenv where we can install Python build dependencies. -VENV=${REPO_ROOT}/venv${PY_BIN} -"python${PY_BIN}" -m venv ${VENV} - -curl https://bootstrap.pypa.io/get-pip.py | ${VENV}/bin/python -${VENV}/bin/python -m pip install \ - --requirement ${REPO_ROOT}/scripts/dev-requirements.txt - -# Create the wheel. -DIST_WHEELS="${REPO_ROOT}/dist_wheels" -mkdir -p ${DIST_WHEELS} -cd ${REPO_ROOT} -${VENV}/bin/python setup.py build_ext \ - --include-dirs=${REPO_ROOT}/usr/include \ - --library-dirs=${REPO_ROOT}/usr/lib \ - --rpath=${REPO_ROOT}/usr/lib -${VENV}/bin/python -m pip wheel ${REPO_ROOT} --wheel-dir ${DIST_WHEELS} - -# Delocate the wheel. -FIXED_WHEELS="${REPO_ROOT}/wheels" -mkdir -p ${FIXED_WHEELS} -${VENV}/bin/delocate-wheel \ - --wheel-dir ${FIXED_WHEELS} \ - --verbose \ - --check-archs \ - ${DIST_WHEELS}/google_crc32c*${PY_TAG}*.whl - -if [[ "${PUBLISH_WHEELS}" == "true" ]]; then - . /${OSX_DIR}/publish_python_wheel.sh -fi - -# test wheel -${VENV}/bin/pip install \ - --no-index --find-links=${REPO_ROOT}/wheels google-crc32c --force-reinstall -${VENV}/bin/pip install pytest -${VENV}/bin/py.test ${REPO_ROOT}/tests -${VENV}/bin/python ${REPO_ROOT}/scripts/check_crc32c_extension.py - -ls ${REPO_ROOT}/wheels/ - -# Clean up. -rm -fr ${DIST_WHEELS} -rm -fr ${VENV} diff --git a/packages/google-crc32c/scripts/osx/publish_python_wheel.sh b/packages/google-crc32c/scripts/osx/publish_python_wheel.sh deleted file mode 100755 index c5288eac09fc..000000000000 --- a/packages/google-crc32c/scripts/osx/publish_python_wheel.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -if [ -z "$(pyenv versions --bare | grep 3.8)" ]; then - echo "Python 3.8 is not installed. Installing..." - pyenv install 3.8 -fi -pyenv shell 3.8 - -python -m pip install "setuptools<71" - -# Start the releasetool reporter -python -m pip install --require-hashes -r ${REPO_ROOT}/.kokoro/requirements.txt -python -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -ls ${REPO_ROOT}/wheels/ -# Disable logging -set +x -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -python -m twine upload --skip-existing --username __token__ --password "${TWINE_PASSWORD}" ${REPO_ROOT}/wheels/* diff --git a/packages/google-crc32c/scripts/requirements.in b/packages/google-crc32c/scripts/requirements.in deleted file mode 100644 index 0508fa5e096b..000000000000 --- a/packages/google-crc32c/scripts/requirements.in +++ /dev/null @@ -1,4 +0,0 @@ -setuptools -pip -wheel -cmake \ No newline at end of file diff --git a/packages/google-crc32c/scripts/requirements.txt b/packages/google-crc32c/scripts/requirements.txt deleted file mode 100644 index ca66848ed4f1..000000000000 --- a/packages/google-crc32c/scripts/requirements.txt +++ /dev/null @@ -1,41 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -cmake==3.31.6 \ - --hash=sha256:024a79ca3d2c355f75875b6cc92d907afd710d1a4ffde2f20a7da712a2f4b1c3 \ - 
--hash=sha256:112b36427e59bd26145b705a49d5f70b16433a655ce807cb8fdd81dd4d0e60c2 \ - --hash=sha256:13f2e636dc27834fe096f53301d6efb913b4b501fdc0ed03f386c0a7e7ec1a21 \ - --hash=sha256:1c8b05df0602365da91ee6a3336fe57525b137706c4ab5675498f662ae1dbcec \ - --hash=sha256:2297e9591307d9c61e557efe737bcf4d7c13a30f1f860732f684a204fee24dca \ - --hash=sha256:42d9883b8958da285d53d5f69d40d9650c2d1bcf922d82b3ebdceb2b3a7d4521 \ - --hash=sha256:4326f6c6f39867a60e2822fea8e6aedbcac09c9f59ad3f0f3386a890a2c8d89d \ - --hash=sha256:547efc1d0e27a194da819a0392fe645a9b8f1485bc2c3f34ae4f1e682cfd3153 \ - --hash=sha256:689441fc74fbb03673c67e20d4636614a231634d5e803387cd213d2cdf9675fc \ - --hash=sha256:6cb97adae7e5390ce68f8b7f38e1be1c72bf19e9f6727f31f8fa1c095b39be88 \ - --hash=sha256:6f77db820af725bb92fab60c4c9d67f64442ac0ea9b933aca4cd4586219cbd1f \ - --hash=sha256:8b67bf9613dfb59c12ce643c6be582c49c981e6eee28c4c244aeb3248b33f05e \ - --hash=sha256:8edddfbf367fa1bcf4b9f3064470bc0e1022f70609c0cf69c863961897826205 \ - --hash=sha256:9eed74a1f2a29a7cd92a9f071a35d64645b19802beb393ec250d6e7c09441314 \ - --hash=sha256:9f170e3c6933dba64f333cb456823bbb1d0ac126f94aa4a577e40855d2b1ca49 \ - --hash=sha256:bbaed969cef3c427f4f17591feb28db4ae595e3a4bbd45cb35522cee14df6a32 \ - --hash=sha256:ce5fc0299ecafe489b2614daa6176c3c2baacea6bc3b359bac9aa25b46ed43e9 \ - --hash=sha256:cefb910be81e1b4fdc3b89ef61819c3e848b3906ed56ac36d090f37cfa05666b \ - --hash=sha256:da9d4fd9abd571fd016ddb27da0428b10277010b23bb21e3678f8b9e96e1686e - # via -r requirements.in -wheel==0.45.1 \ - --hash=sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729 \ - --hash=sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248 - # via -r requirements.in - -# The following packages are considered to be unsafe in a requirements file: -pip==25.0.1 \ - --hash=sha256:88f96547ea48b940a3a385494e181e29fb8637898f88d88737c5049780f196ea \ - --hash=sha256:c46efd13b6aa8279f33f2864459c8ce587ea6a1a59ee20de055868d8f7688f7f - # via -r 
requirements.in -setuptools==78.1.0 \ - --hash=sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54 \ - --hash=sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8 - # via -r requirements.in diff --git a/packages/google-crc32c/scripts/windows/build.bat b/packages/google-crc32c/scripts/windows/build.bat deleted file mode 100644 index 3757d911d4ce..000000000000 --- a/packages/google-crc32c/scripts/windows/build.bat +++ /dev/null @@ -1,79 +0,0 @@ -@rem Copyright 2019 Google LLC. All rights reserved. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem http://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. - - -setlocal ENABLEDELAYEDEXPANSION - -set CMAKE_GENERATOR="Visual Studio 17 2022" -set CONFIGURATION=RelWithDebInfo -set CRC32C_INSTALL_PREFIX=%cd%\build\%CONFIGURATION% - -@rem Iterate through supported Python versions. -@rem Unfortunately pyenv for Windows has an out-of-date versions list. Choco's -@rem installer seems to have some problems with installing multiple versions at -@rem once, so as a workaround, we will install and then uninstall every version. -FOR %%P IN (3.9, 3.10, 3.11, 3.12, 3.13.1, 3.14.0) DO ( - - echo "Installing Python version %%P" - choco install python --version=%%P -y --no-progress - - echo "Listing available Python versions' - py -0 - - set python_version=%%P - set python_version_trimmed=!python_version:~0,4! 
- - py -!python_version_trimmed!-64 -m pip install --upgrade pip - - echo "Installing cmake for Python %%P" - py -!python_version_trimmed!-64 -m pip install cmake - - @rem Add directory as safe to avoid "detected dubious ownership" fatal issue - git config --global --add safe.directory * - git submodule update --init --recursive - pushd google_crc32c - @rem reset hard to cleanup any changes done by a previous build. - git reset --hard - git clean -fxd - - del /s /q CMakeFiles\ - del CMakeCache.txt - - mkdir build - cd build - - echo "Running cmake with Generator: %CMAKE_GENERATOR%, Platform: x64, Install Prefix: %CRC32C_INSTALL_PREFIX%" - - py -!python_version_trimmed!-64 -m cmake -G "Visual Studio 17 2022" -A x64 -DCMAKE_POLICY_VERSION_MINIMUM=3.12 -DCRC32C_BUILD_BENCHMARKS=no -DCRC32C_BUILD_TESTS=no -DBUILD_SHARED_LIBS=yes -DCMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=yes -DCRC32C_USE_GLOG=0 -DCMAKE_INSTALL_PREFIX:PATH="%CRC32C_INSTALL_PREFIX%" .. - - py -!python_version_trimmed!-64 -m cmake --build . --config "%CONFIGURATION%" --target install - - dir %CRC32C_INSTALL_PREFIX% /b /s - popd - - dir %CRC32C_INSTALL_PREFIX%\bin - echo "Copying Binary to root: %CRC32C_INSTALL_PREFIX%\bin\crc32c.dll" - copy %CRC32C_INSTALL_PREFIX%\bin\crc32c.dll . - - py -!python_version_trimmed!-64 -m pip install --upgrade pip setuptools wheel - echo "Building C extension" - py -!python_version_trimmed!-64 setup.py build_ext -v --include-dirs=%CRC32C_INSTALL_PREFIX%\include --library-dirs=%CRC32C_INSTALL_PREFIX%\lib - echo "Building Wheel" - py -!python_version_trimmed!-64 -m pip wheel . --wheel-dir wheels/ - - echo "Built wheel, now running tests." - call %~dp0/test.bat !python_version_trimmed! 
|| goto :error - - echo "Finished with Python version %P" -) diff --git a/packages/google-crc32c/scripts/windows/test.bat b/packages/google-crc32c/scripts/windows/test.bat deleted file mode 100644 index c7f28501d6bb..000000000000 --- a/packages/google-crc32c/scripts/windows/test.bat +++ /dev/null @@ -1,32 +0,0 @@ -@rem Copyright 2019 Google LLC. All rights reserved. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem http://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. - -@rem This test file runs for one Python version at a time, and is intended to -@rem be called from within the build loop. - -set PYTHON_VERSION=%1 -if "%PYTHON_VERSION%"=="" ( - echo "Python version was not provided, using Python 3.10" - set PYTHON_VERSION=3.10 -) - -py -%PYTHON_VERSION%-64 -m pip install --no-index --find-links=wheels google-crc32c --force-reinstall - -py -%PYTHON_VERSION%-64 ./scripts/check_crc32c_extension.py - -@rem pyreadline is removed here because pytest will opportunistically use it if -@rem available, but the installed version is too old to work. 
-py -%PYTHON_VERSION%-64 -m pip uninstall -y pyreadline -py -%PYTHON_VERSION%-64 -m pip install pytest -py -%PYTHON_VERSION%-64 -m pytest tests diff --git a/packages/google-geo-type/.repo-metadata.json b/packages/google-geo-type/.repo-metadata.json index c8b04a86825d..040c783e482d 100644 --- a/packages/google-geo-type/.repo-metadata.json +++ b/packages/google-geo-type/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "", - "api_id": "type.googleapis.com", - "api_shortname": "type", - "client_documentation": "https://googleapis.dev/python/geotype/latest", - "default_version": "apiVersion", - "distribution_name": "google-geo-type", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "OTHER", - "name": "geotype", - "name_pretty": "Geo Type Protos", - "product_documentation": "https://mapsplatform.google.com/maps-products", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Defines common types for Google Geo APIs.", + "api_id": "type.googleapis.com", + "api_shortname": "type", + "client_documentation": "https://googleapis.dev/python/geotype/latest", + "default_version": "apiVersion", + "distribution_name": "google-geo-type", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "OTHER", + "name": "geotype", + "name_pretty": "Geo Type Protos", + "product_documentation": "https://mapsplatform.google.com/maps-products", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-geo-type/README.rst b/packages/google-geo-type/README.rst index d9f6fa1dfdec..c41c9ee7f1ef 100644 --- a/packages/google-geo-type/README.rst +++ b/packages/google-geo-type/README.rst @@ -3,7 +3,7 @@ Python Client for Geo Type Protos |preview| |pypi| |versions| -`Geo Type Protos`_: +`Geo Type Protos`_: Defines common types for Google Geo 
APIs. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-geo-type/docs/README.rst b/packages/google-geo-type/docs/README.rst index d9f6fa1dfdec..c41c9ee7f1ef 100644 --- a/packages/google-geo-type/docs/README.rst +++ b/packages/google-geo-type/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Geo Type Protos |preview| |pypi| |versions| -`Geo Type Protos`_: +`Geo Type Protos`_: Defines common types for Google Geo APIs. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/google-maps-addressvalidation/.repo-metadata.json b/packages/google-maps-addressvalidation/.repo-metadata.json index 780c25b79298..e8bc396a81f8 100644 --- a/packages/google-maps-addressvalidation/.repo-metadata.json +++ b/packages/google-maps-addressvalidation/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Address Validation lets you validate and correct address inputs with Places data powered by Google Maps Platform.", - "api_id": "addressvalidation.googleapis.com", - "api_shortname": "addressvalidation", - "client_documentation": "https://googleapis.dev/python/addressvalidation/latest", - "default_version": "v1", - "distribution_name": "google-maps-addressvalidation", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "addressvalidation", - "name_pretty": "Address Validation API", - "product_documentation": "https://mapsplatform.google.com/maps-products/address-validation/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Address Validation lets you validate and correct address inputs with Places data powered by Google Maps Platform.", + "api_id": "addressvalidation.googleapis.com", + "api_shortname": "addressvalidation", + "client_documentation": "https://googleapis.dev/python/addressvalidation/latest", + "default_version": "v1", + "distribution_name": 
"google-maps-addressvalidation", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "addressvalidation", + "name_pretty": "Address Validation API", + "product_documentation": "https://mapsplatform.google.com/maps-products/address-validation/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-areainsights/.repo-metadata.json b/packages/google-maps-areainsights/.repo-metadata.json index e4298c50c3de..1f25f9d47caf 100644 --- a/packages/google-maps-areainsights/.repo-metadata.json +++ b/packages/google-maps-areainsights/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Places Insights API. ", - "api_id": "areainsights.googleapis.com", - "api_shortname": "areainsights", - "client_documentation": "https://googleapis.dev/python/google-maps-areainsights/latest", - "default_version": "v1", - "distribution_name": "google-maps-areainsights", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1624013&template=2026178", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-maps-areainsights", - "name_pretty": "Places Insights API", - "product_documentation": "https://developers.google.com/maps/documentation/places-insights", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Places Insights API. 
", + "api_id": "areainsights.googleapis.com", + "api_shortname": "areainsights", + "client_documentation": "https://googleapis.dev/python/google-maps-areainsights/latest", + "default_version": "v1", + "distribution_name": "google-maps-areainsights", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1624013\u0026template=2026178", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-maps-areainsights", + "name_pretty": "Places Insights API", + "product_documentation": "https://developers.google.com/maps/documentation/places-insights", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-fleetengine-delivery/.repo-metadata.json b/packages/google-maps-fleetengine-delivery/.repo-metadata.json index b8b6f7eab3e0..cf9109b94414 100644 --- a/packages/google-maps-fleetengine-delivery/.repo-metadata.json +++ b/packages/google-maps-fleetengine-delivery/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. 
Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms.", - "api_id": "fleetengine.googleapis.com", - "api_shortname": "fleetengine", - "client_documentation": "https://googleapis.dev/python/fleetengine-delivery/latest", - "default_version": "v1", - "distribution_name": "google-maps-fleetengine-delivery", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "fleetengine-delivery", - "name_pretty": "Last Mile Fleet Solution Delivery API", - "product_documentation": "https://developers.google.com/maps/documentation/transportation-logistics/mobility", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms.", + "api_id": "fleetengine.googleapis.com", + "api_shortname": "fleetengine", + "client_documentation": "https://googleapis.dev/python/fleetengine-delivery/latest", + "default_version": "v1", + "distribution_name": "google-maps-fleetengine-delivery", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "fleetengine-delivery", + "name_pretty": "Last Mile Fleet Solution Delivery API", + "product_documentation": "https://developers.google.com/maps/documentation/transportation-logistics/mobility", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-fleetengine/.repo-metadata.json b/packages/google-maps-fleetengine/.repo-metadata.json index 
7da92a594c06..bcf8f2e14d8b 100644 --- a/packages/google-maps-fleetengine/.repo-metadata.json +++ b/packages/google-maps-fleetengine/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms.", - "api_id": "fleetengine.googleapis.com", - "api_shortname": "fleetengine", - "client_documentation": "https://googleapis.dev/python/fleetengine/latest", - "default_version": "v1", - "distribution_name": "google-maps-fleetengine", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "fleetengine", - "name_pretty": "Local Rides and Deliveries API", - "product_documentation": "https://developers.google.com/maps/documentation/transportation-logistics/mobility", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Enables Fleet Engine for access to the On Demand Rides and Deliveries and Last Mile Fleet Solution APIs. 
Customer's use of Google Maps Content in the Cloud Logging Services is subject to the Google Maps Platform Terms of Service located at https://cloud.google.com/maps-platform/terms.", + "api_id": "fleetengine.googleapis.com", + "api_shortname": "fleetengine", + "client_documentation": "https://googleapis.dev/python/fleetengine/latest", + "default_version": "v1", + "distribution_name": "google-maps-fleetengine", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "fleetengine", + "name_pretty": "Local Rides and Deliveries API", + "product_documentation": "https://developers.google.com/maps/documentation/transportation-logistics/mobility", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-geocode/.repo-metadata.json b/packages/google-maps-geocode/.repo-metadata.json index 6b14abc307a7..db8f46fb1eb8 100644 --- a/packages/google-maps-geocode/.repo-metadata.json +++ b/packages/google-maps-geocode/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Convert addresses into geographic coordinates (geocoding), which you can\nuse to place markers or position the map. 
This API also allows you to\nconvert geographic coordinates into an address (reverse geocoding).", - "api_id": "geocoding-backend.googleapis.com", - "api_shortname": "geocoding-backend", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-maps-geocode/latest", - "default_version": "v4", - "distribution_name": "google-maps-geocode", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=188871&template=788907", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-maps-geocode", - "name_pretty": "Geocoding API", - "product_documentation": "https://developers.google.com/maps/documentation/geocoding/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "Convert addresses into geographic coordinates (geocoding), which you can\nuse to place markers or position the map. This API also allows you to\nconvert geographic coordinates into an address (reverse geocoding).", + "api_id": "geocoding-backend.googleapis.com", + "api_shortname": "geocoding-backend", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-maps-geocode/latest", + "default_version": "v4", + "distribution_name": "google-maps-geocode", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=188871\u0026template=788907", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-maps-geocode", + "name_pretty": "Geocoding API", + "product_documentation": "https://developers.google.com/maps/documentation/geocoding/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-maps-mapsplatformdatasets/.repo-metadata.json b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json index 8cd808757440..1c91363cee98 100644 --- a/packages/google-maps-mapsplatformdatasets/.repo-metadata.json +++ 
b/packages/google-maps-mapsplatformdatasets/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Maps Platform Datasets API", - "api_id": "mapsplatformdatasets.googleapis.com", - "api_shortname": "mapsplatformdatasets", - "client_documentation": "https://googleapis.dev/python/mapsplatformdatasets/latest", - "default_version": "v1", - "distribution_name": "google-maps-mapsplatformdatasets", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "mapsplatformdatasets", - "name_pretty": "Maps Platform Datasets API", - "product_documentation": "https://developers.google.com/maps", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Maps Platform Datasets API", + "api_id": "mapsplatformdatasets.googleapis.com", + "api_shortname": "mapsplatformdatasets", + "client_documentation": "https://googleapis.dev/python/mapsplatformdatasets/latest", + "default_version": "v1", + "distribution_name": "google-maps-mapsplatformdatasets", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "mapsplatformdatasets", + "name_pretty": "Maps Platform Datasets API", + "product_documentation": "https://developers.google.com/maps", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-navconnect/.repo-metadata.json b/packages/google-maps-navconnect/.repo-metadata.json index 05a0de175b58..f7ae27b4e2aa 100644 --- a/packages/google-maps-navconnect/.repo-metadata.json +++ b/packages/google-maps-navconnect/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Navigation Connect API.", - "api_id": "navigationconnect.googleapis.com", - "api_shortname": "navigationconnect", - "client_documentation": "https://cloud.google.com/python/docs/reference/google-maps-navconnect/latest", - 
"default_version": "v1", - "distribution_name": "google-maps-navconnect", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1180397&template=1812135", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-maps-navconnect", - "name_pretty": "Navigation Connect API", - "product_documentation": "https://developers.google.com/maps/documentation/mobility/navigationconnect", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" -} + "api_description": "Navigation Connect API.", + "api_id": "navigationconnect.googleapis.com", + "api_shortname": "navigationconnect", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-maps-navconnect/latest", + "default_version": "v1", + "distribution_name": "google-maps-navconnect", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1180397\u0026template=1812135", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-maps-navconnect", + "name_pretty": "Navigation Connect API", + "product_documentation": "https://developers.google.com/maps/documentation/mobility/navigationconnect", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-maps-places/.repo-metadata.json b/packages/google-maps-places/.repo-metadata.json index 37a39d31528f..28548ad6528b 100644 --- a/packages/google-maps-places/.repo-metadata.json +++ b/packages/google-maps-places/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Places API allows developers to access a variety of search and retrieval endpoints for a Place.", - "api_id": "places.googleapis.com", - "api_shortname": "places", - "client_documentation": "https://googleapis.dev/python/places/latest", - "default_version": "v1", - "distribution_name": "google-maps-places", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": 
"GAPIC_AUTO", - "name": "places", - "name_pretty": "Places API", - "product_documentation": "https://developers.google.com/maps/documentation/places/web-service/", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Places API allows developers to access a variety of search and retrieval endpoints for a Place.", + "api_id": "places.googleapis.com", + "api_shortname": "places", + "client_documentation": "https://googleapis.dev/python/places/latest", + "default_version": "v1", + "distribution_name": "google-maps-places", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "places", + "name_pretty": "Places API", + "product_documentation": "https://developers.google.com/maps/documentation/places/web-service/", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-routeoptimization/.repo-metadata.json b/packages/google-maps-routeoptimization/.repo-metadata.json index 39e7d7e0b618..446c4cf6cbdd 100644 --- a/packages/google-maps-routeoptimization/.repo-metadata.json +++ b/packages/google-maps-routeoptimization/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Route Optimization API assigns tasks and routes to a vehicle fleet, optimizing against the objectives and constraints that you supply for your transportation goals.", - "api_id": "routeoptimization.googleapis.com", - "api_shortname": "routeoptimization", - "client_documentation": "https://googleapis.dev/python/google-maps-routeoptimization/latest", - "default_version": "v1", - "distribution_name": "google-maps-routeoptimization", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1546507", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-maps-routeoptimization", - "name_pretty": "Route Optimization API", - "product_documentation": 
"https://developers.google.com/maps/documentation/route-optimization", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Route Optimization API assigns tasks and routes to a vehicle fleet, optimizing against the objectives and constraints that you supply for your transportation goals.", + "api_id": "routeoptimization.googleapis.com", + "api_shortname": "routeoptimization", + "client_documentation": "https://googleapis.dev/python/google-maps-routeoptimization/latest", + "default_version": "v1", + "distribution_name": "google-maps-routeoptimization", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1546507", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-maps-routeoptimization", + "name_pretty": "Route Optimization API", + "product_documentation": "https://developers.google.com/maps/documentation/route-optimization", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-routing/.repo-metadata.json b/packages/google-maps-routing/.repo-metadata.json index 5d8e95f0995d..faa378482197 100644 --- a/packages/google-maps-routing/.repo-metadata.json +++ b/packages/google-maps-routing/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Help your users find the ideal way to get from A to Z with comprehensive data and real-time traffic.", - "api_id": "routing.googleapis.com", - "api_shortname": "routing", - "client_documentation": "https://googleapis.dev/python/routing/latest", - "default_version": "v2", - "distribution_name": "google-maps-routing", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "routing", - "name_pretty": "Google Maps Routing", - "product_documentation": "https://mapsplatform.google.com/maps-products/#routes-section", - "release_level": "preview", - "repo": 
"googleapis/google-cloud-python" + "api_description": "Help your users find the ideal way to get from A to Z with comprehensive data and real-time traffic.", + "api_id": "routing.googleapis.com", + "api_shortname": "routing", + "client_documentation": "https://googleapis.dev/python/routing/latest", + "default_version": "v2", + "distribution_name": "google-maps-routing", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "routing", + "name_pretty": "Google Maps Routing", + "product_documentation": "https://mapsplatform.google.com/maps-products/#routes-section", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-maps-solar/.repo-metadata.json b/packages/google-maps-solar/.repo-metadata.json index 99bbe49a41ef..1113f1859211 100644 --- a/packages/google-maps-solar/.repo-metadata.json +++ b/packages/google-maps-solar/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "The Google Maps Platform Solar API is a service focused on helping accelerate solar and energy system installations.", - "api_id": "solar.googleapis.com", - "api_shortname": "solar", - "client_documentation": "https://googleapis.dev/python/google-maps-solar/latest", - "default_version": "v1", - "distribution_name": "google-maps-solar", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=1356349", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-maps-solar", - "name_pretty": "Solar API", - "product_documentation": "https://developers.google.com/maps/documentation/solar/overview", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "The Google Maps Platform Solar API is a service focused on helping accelerate solar and energy system installations.", + "api_id": "solar.googleapis.com", + "api_shortname": "solar", + "client_documentation": 
"https://googleapis.dev/python/google-maps-solar/latest", + "default_version": "v1", + "distribution_name": "google-maps-solar", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1356349", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-maps-solar", + "name_pretty": "Solar API", + "product_documentation": "https://developers.google.com/maps/documentation/solar/overview", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-resumable-media/.repo-metadata.json b/packages/google-resumable-media/.repo-metadata.json index 1e4c926f061e..0a2cb7cb8dc2 100644 --- a/packages/google-resumable-media/.repo-metadata.json +++ b/packages/google-resumable-media/.repo-metadata.json @@ -1,12 +1,10 @@ { - "name": "google-resumable-media", - "name_pretty": "Google Resumable Media", "client_documentation": "https://cloud.google.com/python/docs/reference/google-resumable-media/latest", - "release_level": "preview", + "distribution_name": "google-resumable-media", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-resumable-media-python", - "distribution_name": "google-resumable-media", - "default_version": "", - "codeowner_team": "@googleapis/gcs-team" -} + "name": "google-resumable-media", + "name_pretty": "Google Resumable Media", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/google-resumable-media/docs/README.rst b/packages/google-resumable-media/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/google-resumable-media/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/google-resumable-media/docs/README.rst b/packages/google-resumable-media/docs/README.rst new file mode 100644 index 000000000000..ae3e8823aab4 --- /dev/null +++ 
b/packages/google-resumable-media/docs/README.rst @@ -0,0 +1,32 @@ +``google-resumable-media`` +========================== + + +Utilities for Google Media Downloads and Resumable Uploads + + +See the `docs`_ for examples and usage. + +.. _docs: https://googleapis.dev/python/google-resumable-media/latest/index.html + +Experimental `asyncio` Support +------------------------------ +While still in development and subject to change, this library has `asyncio` +support at `google._async_resumable_media`. + +Supported Python Versions +------------------------- +Python >= 3.9 + +Unsupported Python Versions +--------------------------- + +Python <= 3.8 + + +License +------- + +Apache 2.0 - See `the LICENSE`_ for more information. + +.. _the LICENSE: https://github.com/googleapis/google-resumable-media-python/blob/main/LICENSE diff --git a/packages/google-shopping-css/.repo-metadata.json b/packages/google-shopping-css/.repo-metadata.json index e2570dfb5ff0..e201818df629 100644 --- a/packages/google-shopping-css/.repo-metadata.json +++ b/packages/google-shopping-css/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Comparison Shopping Service (CSS) account data at scale.", - "api_id": "css.googleapis.com", - "api_shortname": "css", - "client_documentation": "https://googleapis.dev/python/google-shopping-css/latest", - "default_version": "v1", - "distribution_name": "google-shopping-css", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=826068&template=1564577", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-css", - "name_pretty": "CSS API", - "product_documentation": "https://developers.google.com/comparison-shopping-services/api", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Comparison Shopping Service (CSS) account data at scale.", + "api_id": "css.googleapis.com", + "api_shortname": "css", + 
"client_documentation": "https://googleapis.dev/python/google-shopping-css/latest", + "default_version": "v1", + "distribution_name": "google-shopping-css", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=826068\u0026template=1564577", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-css", + "name_pretty": "CSS API", + "product_documentation": "https://developers.google.com/comparison-shopping-services/api", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-accounts/.repo-metadata.json b/packages/google-shopping-merchant-accounts/.repo-metadata.json index a171bbb4c4e9..ac4d4243839c 100644 --- a/packages/google-shopping-merchant-accounts/.repo-metadata.json +++ b/packages/google-shopping-merchant-accounts/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "accounts.googleapis.com", - "api_shortname": "accounts", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-accounts/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-accounts", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-accounts", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "accounts.googleapis.com", + "api_shortname": "accounts", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-accounts/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-accounts", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-accounts", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-conversions/.repo-metadata.json b/packages/google-shopping-merchant-conversions/.repo-metadata.json index 5c95a9fd175d..ad46ef99f142 100644 --- a/packages/google-shopping-merchant-conversions/.repo-metadata.json +++ b/packages/google-shopping-merchant-conversions/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "merchantapi.googleapis.com", - "api_shortname": "conversions", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-conversions/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-conversions", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-conversions", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "merchantapi.googleapis.com", + "api_shortname": "conversions", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-conversions/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-conversions", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-conversions", + "name_pretty": "Merchant API", 
+ "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-datasources/.repo-metadata.json b/packages/google-shopping-merchant-datasources/.repo-metadata.json index efd2cd386222..d65852216931 100644 --- a/packages/google-shopping-merchant-datasources/.repo-metadata.json +++ b/packages/google-shopping-merchant-datasources/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "datasources.googleapis.com", - "api_shortname": "datasources", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-datasources/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-datasources", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-datasources", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "datasources.googleapis.com", + "api_shortname": "datasources", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-datasources/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-datasources", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-datasources", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git 
a/packages/google-shopping-merchant-inventories/.repo-metadata.json b/packages/google-shopping-merchant-inventories/.repo-metadata.json index 972667d207d9..20f3dcaca510 100644 --- a/packages/google-shopping-merchant-inventories/.repo-metadata.json +++ b/packages/google-shopping-merchant-inventories/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "inventories.googleapis.com", - "api_shortname": "inventories", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-inventories/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-inventories", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084&template=555201", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-inventories", - "name_pretty": "Merchant Inventories API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "inventories.googleapis.com", + "api_shortname": "inventories", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-inventories/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-inventories", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084\u0026template=555201", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-inventories", + "name_pretty": "Merchant Inventories API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-issueresolution/.repo-metadata.json 
b/packages/google-shopping-merchant-issueresolution/.repo-metadata.json index 3a7a7ef0b1c6..2640c24c107b 100644 --- a/packages/google-shopping-merchant-issueresolution/.repo-metadata.json +++ b/packages/google-shopping-merchant-issueresolution/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center Accounts. ", - "api_id": "issueresolution.googleapis.com", - "api_shortname": "issueresolution", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-issueresolution/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-issueresolution", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084&template=555201", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-issueresolution", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center Accounts. 
", + "api_id": "issueresolution.googleapis.com", + "api_shortname": "issueresolution", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-issueresolution/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-issueresolution", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084\u0026template=555201", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-issueresolution", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-lfp/.repo-metadata.json b/packages/google-shopping-merchant-lfp/.repo-metadata.json index d1c737c13fc9..4d6293c99ad9 100644 --- a/packages/google-shopping-merchant-lfp/.repo-metadata.json +++ b/packages/google-shopping-merchant-lfp/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "merchantapi.googleapis.com", - "api_shortname": "lfp", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-lfp/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-lfp", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-lfp", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "merchantapi.googleapis.com", + "api_shortname": "lfp", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-lfp/latest", + "default_version": "v1", + "distribution_name": 
"google-shopping-merchant-lfp", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-lfp", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-notifications/.repo-metadata.json b/packages/google-shopping-merchant-notifications/.repo-metadata.json index 199a58f0cd5a..4a9df3662c68 100644 --- a/packages/google-shopping-merchant-notifications/.repo-metadata.json +++ b/packages/google-shopping-merchant-notifications/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "merchantapi.googleapis.com", - "api_shortname": "notifications", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-notifications/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-notifications", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-notifications", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "merchantapi.googleapis.com", + "api_shortname": "notifications", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-notifications/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-notifications", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": 
"google-shopping-merchant-notifications", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-ordertracking/.repo-metadata.json b/packages/google-shopping-merchant-ordertracking/.repo-metadata.json index 1969d82c03bc..8f0648de46ce 100644 --- a/packages/google-shopping-merchant-ordertracking/.repo-metadata.json +++ b/packages/google-shopping-merchant-ordertracking/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center Accounts. ", - "api_id": "ordertracking.googleapis.com", - "api_shortname": "ordertracking", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-ordertracking/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-ordertracking", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084&template=555201", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-ordertracking", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center Accounts. 
", + "api_id": "ordertracking.googleapis.com", + "api_shortname": "ordertracking", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-ordertracking/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-ordertracking", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084\u0026template=555201", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-ordertracking", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-products/.repo-metadata.json b/packages/google-shopping-merchant-products/.repo-metadata.json index 603f7240d8a3..e2a2a848d4ff 100644 --- a/packages/google-shopping-merchant-products/.repo-metadata.json +++ b/packages/google-shopping-merchant-products/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "products.googleapis.com", - "api_shortname": "products", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-products/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-products", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-products", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "products.googleapis.com", + "api_shortname": "products", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-products/latest", + 
"default_version": "v1", + "distribution_name": "google-shopping-merchant-products", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-products", + "name_pretty": "Merchant API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-productstudio/.repo-metadata.json b/packages/google-shopping-merchant-productstudio/.repo-metadata.json index 61fea277ff14..b275d79aada9 100644 --- a/packages/google-shopping-merchant-productstudio/.repo-metadata.json +++ b/packages/google-shopping-merchant-productstudio/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "productstudio.googleapis.com", - "api_shortname": "productstudio", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-productstudio/latest", - "default_version": "v1alpha", - "distribution_name": "google-shopping-merchant-productstudio", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-productstudio", - "name_pretty": "Merchant ProductStudio API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "productstudio.googleapis.com", + "api_shortname": "productstudio", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-productstudio/latest", + "default_version": "v1alpha", + "distribution_name": "google-shopping-merchant-productstudio", + "issue_tracker": 
"https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-productstudio", + "name_pretty": "Merchant ProductStudio API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-promotions/.repo-metadata.json b/packages/google-shopping-merchant-promotions/.repo-metadata.json index ec01ad8668ed..302876816854 100644 --- a/packages/google-shopping-merchant-promotions/.repo-metadata.json +++ b/packages/google-shopping-merchant-promotions/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "promotions.googleapis.com", - "api_shortname": "promotions", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-promotions/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-promotions", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-promotions", - "name_pretty": "Merchant API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "promotions.googleapis.com", + "api_shortname": "promotions", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-promotions/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-promotions", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-promotions", + "name_pretty": "Merchant 
API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-quota/.repo-metadata.json b/packages/google-shopping-merchant-quota/.repo-metadata.json index 14c8ed2d65e5..515cd01820d1 100644 --- a/packages/google-shopping-merchant-quota/.repo-metadata.json +++ b/packages/google-shopping-merchant-quota/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts.", - "api_id": "merchantapi.googleapis.com", - "api_shortname": "merchantapi", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-quota/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-quota", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084&template=555201", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-quota", - "name_pretty": "Shopping Merchant Quota", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts.", + "api_id": "merchantapi.googleapis.com", + "api_shortname": "merchantapi", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-quota/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-quota", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084\u0026template=555201", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-quota", + "name_pretty": "Shopping Merchant Quota", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git 
a/packages/google-shopping-merchant-reports/.repo-metadata.json b/packages/google-shopping-merchant-reports/.repo-metadata.json index 4ed851cb7301..f93ddb2621a2 100644 --- a/packages/google-shopping-merchant-reports/.repo-metadata.json +++ b/packages/google-shopping-merchant-reports/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center accounts", - "api_id": "reports.googleapis.com", - "api_shortname": "reports", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-reports/latest", - "default_version": "v1", - "distribution_name": "google-shopping-merchant-reports", - "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084&template=555201", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-reports", - "name_pretty": "Merchant Reports API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center accounts", + "api_id": "reports.googleapis.com", + "api_shortname": "reports", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-reports/latest", + "default_version": "v1", + "distribution_name": "google-shopping-merchant-reports", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=171084\u0026template=555201", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-reports", + "name_pretty": "Merchant Reports API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-merchant-reviews/.repo-metadata.json b/packages/google-shopping-merchant-reviews/.repo-metadata.json index c051782afedd..68c0675a5f54 100644 --- 
a/packages/google-shopping-merchant-reviews/.repo-metadata.json +++ b/packages/google-shopping-merchant-reviews/.repo-metadata.json @@ -1,16 +1,16 @@ { - "api_description": "Programmatically manage your Merchant Center Accounts", - "api_id": "reviews.googleapis.com", - "api_shortname": "reviews", - "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-reviews/latest", - "default_version": "v1beta", - "distribution_name": "google-shopping-merchant-reviews", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-merchant-reviews", - "name_pretty": "Merchant Reviews API", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "preview", - "repo": "googleapis/google-cloud-python" + "api_description": "Programmatically manage your Merchant Center Accounts", + "api_id": "reviews.googleapis.com", + "api_shortname": "reviews", + "client_documentation": "https://googleapis.dev/python/google-shopping-merchant-reviews/latest", + "default_version": "v1beta", + "distribution_name": "google-shopping-merchant-reviews", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-merchant-reviews", + "name_pretty": "Merchant Reviews API", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/google-shopping-type/.repo-metadata.json b/packages/google-shopping-type/.repo-metadata.json index 4608e831c585..47d61649c3ea 100644 --- a/packages/google-shopping-type/.repo-metadata.json +++ b/packages/google-shopping-type/.repo-metadata.json @@ -1,16 +1,15 @@ { - "api_description": "", - "api_id": "type.googleapis.com", - "api_shortname": "type", - "client_documentation": 
"https://googleapis.dev/python/google-shopping-type/latest", - "default_version": "apiVersion", - "distribution_name": "google-shopping-type", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "google-shopping-type", - "name_pretty": "Shopping Type Protos", - "product_documentation": "https://developers.google.com/merchant/api", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_id": "type.googleapis.com", + "api_shortname": "type", + "client_documentation": "https://googleapis.dev/python/google-shopping-type/latest", + "default_version": "apiVersion", + "distribution_name": "google-shopping-type", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "google-shopping-type", + "name_pretty": "Shopping Type Protos", + "product_documentation": "https://developers.google.com/merchant/api", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/googleapis-common-protos/.repo-metadata.json b/packages/googleapis-common-protos/.repo-metadata.json index dedb772f7754..50e36e135e05 100644 --- a/packages/googleapis-common-protos/.repo-metadata.json +++ b/packages/googleapis-common-protos/.repo-metadata.json @@ -1,13 +1,16 @@ { - "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/googleapis-common-protos", - "default_version": "apiVersion", - "distribution_name": "googleapis-common-protos", - "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "language": "python", - "library_type": "CORE", - "name": "googleapis-common-protos", - "name_pretty": "Google APIs Common Protos", - "product_documentation": "https://github.com/googleapis/googleapis/tree/master/google", - "release_level": "stable", - "repo": 
"googleapis/google-cloud-python" + "api_description": "Lets you define and config your API service.", + "api_id": "serviceconfig.googleapis.com", + "api_shortname": "serviceconfig", + "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/googleapis-common-protos", + "default_version": "apiVersion", + "distribution_name": "googleapis-common-protos", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "language": "python", + "library_type": "CORE", + "name": "googleapis-common-protos", + "name_pretty": "Google APIs Common Protos", + "product_documentation": "https://github.com/googleapis/googleapis/tree/master/google", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/googleapis-common-protos/README.rst b/packages/googleapis-common-protos/README.rst index 8205d17725fa..cc7e71a06484 100644 --- a/packages/googleapis-common-protos/README.rst +++ b/packages/googleapis-common-protos/README.rst @@ -3,7 +3,7 @@ Python Client for Google APIs Common Protos |stable| |pypi| |versions| -`Google APIs Common Protos`_: +`Google APIs Common Protos`_: Lets you define and config your API service. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/googleapis-common-protos/docs/README.rst b/packages/googleapis-common-protos/docs/README.rst index 8205d17725fa..cc7e71a06484 100644 --- a/packages/googleapis-common-protos/docs/README.rst +++ b/packages/googleapis-common-protos/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Google APIs Common Protos |stable| |pypi| |versions| -`Google APIs Common Protos`_: +`Google APIs Common Protos`_: Lets you define and config your API service. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/googleapis-common-protos/google/api/annotations_pb2.py b/packages/googleapis-common-protos/google/api/annotations_pb2.py index ac84d0398d53..ac5cf8d8b5b9 100644 --- a/packages/googleapis-common-protos/google/api/annotations_pb2.py +++ b/packages/googleapis-common-protos/google/api/annotations_pb2.py @@ -29,9 +29,10 @@ _sym_db = _symbol_database.Default() -from google.api import http_pb2 as google_dot_api_dot_http__pb2 from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 +from google.api import http_pb2 as google_dot_api_dot_http__pb2 + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3' ) diff --git a/packages/googleapis-common-protos/google/api/annotations_pb2.pyi b/packages/googleapis-common-protos/google/api/annotations_pb2.pyi index 41b6de69d5f5..4c4afeb9545f 100644 --- a/packages/googleapis-common-protos/google/api/annotations_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/annotations_pb2.pyi @@ -14,10 +14,11 @@ from typing import ClassVar as _ClassVar -from google.api import http_pb2 as _http_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pb2 as _descriptor_pb2 +from google.api import http_pb2 as _http_pb2 + DESCRIPTOR: _descriptor.FileDescriptor HTTP_FIELD_NUMBER: _ClassVar[int] http: _descriptor.FieldDescriptor diff --git a/packages/googleapis-common-protos/google/api/client_pb2.py b/packages/googleapis-common-protos/google/api/client_pb2.py index 05a2e131737c..7c41fb1f1739 100644 --- 
a/packages/googleapis-common-protos/google/api/client_pb2.py +++ b/packages/googleapis-common-protos/google/api/client_pb2.py @@ -29,10 +29,11 @@ _sym_db = _symbol_database.Default() -from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2 from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2 + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n\x17google/api/client.proto\x12\ngoogle.api\x1a\x1dgoogle/api/launch_stage.proto\x1a google/protobuf/descriptor.proto\x1a\x1egoogle/protobuf/duration.proto"\xbe\x01\n\x16\x43ommonLanguageSettings\x12\x1e\n\x12reference_docs_uri\x18\x01 \x01(\tB\x02\x18\x01\x12:\n\x0c\x64\x65stinations\x18\x02 \x03(\x0e\x32$.google.api.ClientLibraryDestination\x12H\n\x1aselective_gapic_generation\x18\x03 \x01(\x0b\x32$.google.api.SelectiveGapicGeneration"\xfb\x03\n\x15\x43lientLibrarySettings\x12\x0f\n\x07version\x18\x01 \x01(\t\x12-\n\x0claunch_stage\x18\x02 \x01(\x0e\x32\x17.google.api.LaunchStage\x12\x1a\n\x12rest_numeric_enums\x18\x03 \x01(\x08\x12/\n\rjava_settings\x18\x15 \x01(\x0b\x32\x18.google.api.JavaSettings\x12-\n\x0c\x63pp_settings\x18\x16 \x01(\x0b\x32\x17.google.api.CppSettings\x12-\n\x0cphp_settings\x18\x17 \x01(\x0b\x32\x17.google.api.PhpSettings\x12\x33\n\x0fpython_settings\x18\x18 \x01(\x0b\x32\x1a.google.api.PythonSettings\x12/\n\rnode_settings\x18\x19 \x01(\x0b\x32\x18.google.api.NodeSettings\x12\x33\n\x0f\x64otnet_settings\x18\x1a \x01(\x0b\x32\x1a.google.api.DotnetSettings\x12/\n\rruby_settings\x18\x1b \x01(\x0b\x32\x18.google.api.RubySettings\x12+\n\x0bgo_settings\x18\x1c \x01(\x0b\x32\x16.google.api.GoSettings"\xa8\x03\n\nPublishing\x12\x33\n\x0fmethod_settings\x18\x02 \x03(\x0b\x32\x1a.google.api.MethodSettings\x12\x15\n\rnew_issue_uri\x18\x65 
\x01(\t\x12\x19\n\x11\x64ocumentation_uri\x18\x66 \x01(\t\x12\x16\n\x0e\x61pi_short_name\x18g \x01(\t\x12\x14\n\x0cgithub_label\x18h \x01(\t\x12\x1e\n\x16\x63odeowner_github_teams\x18i \x03(\t\x12\x16\n\x0e\x64oc_tag_prefix\x18j \x01(\t\x12;\n\x0corganization\x18k \x01(\x0e\x32%.google.api.ClientLibraryOrganization\x12;\n\x10library_settings\x18m \x03(\x0b\x32!.google.api.ClientLibrarySettings\x12)\n!proto_reference_documentation_uri\x18n \x01(\t\x12(\n rest_reference_documentation_uri\x18o \x01(\t"\xe3\x01\n\x0cJavaSettings\x12\x17\n\x0flibrary_package\x18\x01 \x01(\t\x12L\n\x13service_class_names\x18\x02 \x03(\x0b\x32/.google.api.JavaSettings.ServiceClassNamesEntry\x12\x32\n\x06\x63ommon\x18\x03 \x01(\x0b\x32".google.api.CommonLanguageSettings\x1a\x38\n\x16ServiceClassNamesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"A\n\x0b\x43ppSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings"Z\n\x0bPhpSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings\x12\x17\n\x0flibrary_package\x18\x02 \x01(\t"\x9b\x02\n\x0ePythonSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings\x12N\n\x15\x65xperimental_features\x18\x02 \x01(\x0b\x32/.google.api.PythonSettings.ExperimentalFeatures\x1a\x84\x01\n\x14\x45xperimentalFeatures\x12\x1d\n\x15rest_async_io_enabled\x18\x01 \x01(\x08\x12\'\n\x1fprotobuf_pythonic_types_enabled\x18\x02 \x01(\x08\x12$\n\x1cunversioned_package_disabled\x18\x03 \x01(\x08"B\n\x0cNodeSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings"\xaa\x03\n\x0e\x44otnetSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings\x12I\n\x10renamed_services\x18\x02 \x03(\x0b\x32/.google.api.DotnetSettings.RenamedServicesEntry\x12K\n\x11renamed_resources\x18\x03 \x03(\x0b\x32\x30.google.api.DotnetSettings.RenamedResourcesEntry\x12\x19\n\x11ignored_resources\x18\x04 
\x03(\t\x12 \n\x18\x66orced_namespace_aliases\x18\x05 \x03(\t\x12\x1e\n\x16handwritten_signatures\x18\x06 \x03(\t\x1a\x36\n\x14RenamedServicesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x37\n\x15RenamedResourcesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"B\n\x0cRubySettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings"\xbf\x01\n\nGoSettings\x12\x32\n\x06\x63ommon\x18\x01 \x01(\x0b\x32".google.api.CommonLanguageSettings\x12\x45\n\x10renamed_services\x18\x02 \x03(\x0b\x32+.google.api.GoSettings.RenamedServicesEntry\x1a\x36\n\x14RenamedServicesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x82\x03\n\x0eMethodSettings\x12\x10\n\x08selector\x18\x01 \x01(\t\x12<\n\x0clong_running\x18\x02 \x01(\x0b\x32&.google.api.MethodSettings.LongRunning\x12\x1d\n\x15\x61uto_populated_fields\x18\x03 \x03(\t\x12\x31\n\x08\x62\x61tching\x18\x04 \x01(\x0b\x32\x1f.google.api.BatchingConfigProto\x1a\xcd\x01\n\x0bLongRunning\x12\x35\n\x12initial_poll_delay\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x1d\n\x15poll_delay_multiplier\x18\x02 \x01(\x02\x12\x31\n\x0emax_poll_delay\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x35\n\x12total_poll_timeout\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration"Q\n\x18SelectiveGapicGeneration\x12\x0f\n\x07methods\x18\x01 \x03(\t\x12$\n\x1cgenerate_omitted_as_internal\x18\x02 \x01(\x08"\x8b\x01\n\x13\x42\x61tchingConfigProto\x12\x35\n\nthresholds\x18\x01 \x01(\x0b\x32!.google.api.BatchingSettingsProto\x12=\n\x10\x62\x61tch_descriptor\x18\x02 \x01(\x0b\x32#.google.api.BatchingDescriptorProto"\xeb\x02\n\x15\x42\x61tchingSettingsProto\x12\x1f\n\x17\x65lement_count_threshold\x18\x01 \x01(\x05\x12\x1e\n\x16request_byte_threshold\x18\x02 \x01(\x03\x12\x32\n\x0f\x64\x65lay_threshold\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x1b\n\x13\x65lement_count_limit\x18\x04 
\x01(\x05\x12\x1a\n\x12request_byte_limit\x18\x05 \x01(\x05\x12"\n\x1a\x66low_control_element_limit\x18\x06 \x01(\x05\x12\x1f\n\x17\x66low_control_byte_limit\x18\x07 \x01(\x05\x12_\n$flow_control_limit_exceeded_behavior\x18\x08 \x01(\x0e\x32\x31.google.api.FlowControlLimitExceededBehaviorProto"i\n\x17\x42\x61tchingDescriptorProto\x12\x15\n\rbatched_field\x18\x01 \x01(\t\x12\x1c\n\x14\x64iscriminator_fields\x18\x02 \x03(\t\x12\x19\n\x11subresponse_field\x18\x03 \x01(\t*\xa3\x01\n\x19\x43lientLibraryOrganization\x12+\n\'CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED\x10\x00\x12\t\n\x05\x43LOUD\x10\x01\x12\x07\n\x03\x41\x44S\x10\x02\x12\n\n\x06PHOTOS\x10\x03\x12\x0f\n\x0bSTREET_VIEW\x10\x04\x12\x0c\n\x08SHOPPING\x10\x05\x12\x07\n\x03GEO\x10\x06\x12\x11\n\rGENERATIVE_AI\x10\x07*g\n\x18\x43lientLibraryDestination\x12*\n&CLIENT_LIBRARY_DESTINATION_UNSPECIFIED\x10\x00\x12\n\n\x06GITHUB\x10\n\x12\x13\n\x0fPACKAGE_MANAGER\x10\x14*g\n%FlowControlLimitExceededBehaviorProto\x12\x12\n\x0eUNSET_BEHAVIOR\x10\x00\x12\x13\n\x0fTHROW_EXCEPTION\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\n\n\x06IGNORE\x10\x03:9\n\x10method_signature\x12\x1e.google.protobuf.MethodOptions\x18\x9b\x08 \x03(\t:6\n\x0c\x64\x65\x66\x61ult_host\x12\x1f.google.protobuf.ServiceOptions\x18\x99\x08 \x01(\t:6\n\x0coauth_scopes\x12\x1f.google.protobuf.ServiceOptions\x18\x9a\x08 \x01(\t:8\n\x0b\x61pi_version\x12\x1f.google.protobuf.ServiceOptions\x18\xc1\xba\xab\xfa\x01 \x01(\tBi\n\x0e\x63om.google.apiB\x0b\x43lientProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3' ) diff --git a/packages/googleapis-common-protos/google/api/client_pb2.pyi b/packages/googleapis-common-protos/google/api/client_pb2.pyi index 7e5bcdb4d5a3..369f0134c17b 100644 --- a/packages/googleapis-common-protos/google/api/client_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/client_pb2.pyi @@ -18,7 +18,6 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from 
typing import Union as _Union -from google.api import launch_stage_pb2 as _launch_stage_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pb2 as _descriptor_pb2 from google.protobuf import duration_pb2 as _duration_pb2 @@ -26,6 +25,8 @@ from google.protobuf import message as _message from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.api import launch_stage_pb2 as _launch_stage_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class ClientLibraryOrganization(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): diff --git a/packages/googleapis-common-protos/google/api/control_pb2.pyi b/packages/googleapis-common-protos/google/api/control_pb2.pyi index 94af7c7865e3..c7898c3aca57 100644 --- a/packages/googleapis-common-protos/google/api/control_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/control_pb2.pyi @@ -18,11 +18,12 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union -from google.api import policy_pb2 as _policy_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf.internal import containers as _containers +from google.api import policy_pb2 as _policy_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class Control(_message.Message): diff --git a/packages/googleapis-common-protos/google/api/log_pb2.pyi b/packages/googleapis-common-protos/google/api/log_pb2.pyi index 4fafcd722a78..e1778599e961 100644 --- a/packages/googleapis-common-protos/google/api/log_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/log_pb2.pyi @@ -18,11 +18,12 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union -from google.api import label_pb2 as _label_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf 
import message as _message from google.protobuf.internal import containers as _containers +from google.api import label_pb2 as _label_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class LogDescriptor(_message.Message): diff --git a/packages/googleapis-common-protos/google/api/metric_pb2.py b/packages/googleapis-common-protos/google/api/metric_pb2.py index 2a44f7bf2dfa..53ab18b97266 100644 --- a/packages/googleapis-common-protos/google/api/metric_pb2.py +++ b/packages/googleapis-common-protos/google/api/metric_pb2.py @@ -29,9 +29,10 @@ _sym_db = _symbol_database.Default() +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + from google.api import label_pb2 as google_dot_api_dot_label__pb2 from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n\x17google/api/metric.proto\x12\ngoogle.api\x1a\x16google/api/label.proto\x1a\x1dgoogle/api/launch_stage.proto\x1a\x1egoogle/protobuf/duration.proto"\xac\x08\n\x10MetricDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x08 \x01(\t\x12+\n\x06labels\x18\x02 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12<\n\x0bmetric_kind\x18\x03 \x01(\x0e\x32\'.google.api.MetricDescriptor.MetricKind\x12:\n\nvalue_type\x18\x04 \x01(\x0e\x32&.google.api.MetricDescriptor.ValueType\x12\x0c\n\x04unit\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x07 \x01(\t\x12G\n\x08metadata\x18\n \x01(\x0b\x32\x35.google.api.MetricDescriptor.MetricDescriptorMetadata\x12-\n\x0claunch_stage\x18\x0c \x01(\x0e\x32\x17.google.api.LaunchStage\x12 \n\x18monitored_resource_types\x18\r \x03(\t\x1a\xbd\x03\n\x18MetricDescriptorMetadata\x12\x31\n\x0claunch_stage\x18\x01 \x01(\x0e\x32\x17.google.api.LaunchStageB\x02\x18\x01\x12\x30\n\rsample_period\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0cingest_delay\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x84\x01\n$time_series_resource_hierarchy_level\x18\x04 \x03(\x0e\x32V.google.api.MetricDescriptor.MetricDescriptorMetadata.TimeSeriesResourceHierarchyLevel"\x83\x01\n TimeSeriesResourceHierarchyLevel\x12\x34\n0TIME_SERIES_RESOURCE_HIERARCHY_LEVEL_UNSPECIFIED\x10\x00\x12\x0b\n\x07PROJECT\x10\x01\x12\x10\n\x0cORGANIZATION\x10\x02\x12\n\n\x06\x46OLDER\x10\x03"O\n\nMetricKind\x12\x1b\n\x17METRIC_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05GAUGE\x10\x01\x12\t\n\x05\x44\x45LTA\x10\x02\x12\x0e\n\nCUMULATIVE\x10\x03"q\n\tValueType\x12\x1a\n\x16VALUE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\n\n\x06STRING\x10\x04\x12\x10\n\x0c\x44ISTRIBUTION\x10\x05\x12\t\n\x05MONEY\x10\x06"u\n\x06Metric\x12\x0c\n\x04type\x18\x03 \x01(\t\x12.\n\x06labels\x18\x02 \x03(\x0b\x32\x1e.google.api.Metric.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42_\n\x0e\x63om.google.apiB\x0bMetricProtoP\x01Z7google.golang.org/genproto/googleapis/api/metric;metric\xa2\x02\x04GAPIb\x06proto3' diff --git a/packages/googleapis-common-protos/google/api/metric_pb2.pyi b/packages/googleapis-common-protos/google/api/metric_pb2.pyi index f0c2fa50858c..2ff6b7439a28 100644 --- a/packages/googleapis-common-protos/google/api/metric_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/metric_pb2.pyi @@ -18,14 +18,15 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union -from google.api import label_pb2 as _label_pb2 -from google.api import launch_stage_pb2 as _launch_stage_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import duration_pb2 as _duration_pb2 from google.protobuf import message as _message from google.protobuf.internal import containers as _containers from 
google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.api import label_pb2 as _label_pb2 +from google.api import launch_stage_pb2 as _launch_stage_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class MetricDescriptor(_message.Message): diff --git a/packages/googleapis-common-protos/google/api/monitored_resource_pb2.py b/packages/googleapis-common-protos/google/api/monitored_resource_pb2.py index fd764a5f3e89..51cc782b1b41 100644 --- a/packages/googleapis-common-protos/google/api/monitored_resource_pb2.py +++ b/packages/googleapis-common-protos/google/api/monitored_resource_pb2.py @@ -29,9 +29,10 @@ _sym_db = _symbol_database.Default() +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 + from google.api import label_pb2 as google_dot_api_dot_label__pb2 from google.api import launch_stage_pb2 as google_dot_api_dot_launch__stage__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n#google/api/monitored_resource.proto\x12\ngoogle.api\x1a\x16google/api/label.proto\x1a\x1dgoogle/api/launch_stage.proto\x1a\x1cgoogle/protobuf/struct.proto"\xc0\x01\n\x1bMonitoredResourceDescriptor\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12+\n\x06labels\x18\x04 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12-\n\x0claunch_stage\x18\x07 \x01(\x0e\x32\x17.google.api.LaunchStage"\x8b\x01\n\x11MonitoredResource\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x39\n\x06labels\x18\x02 \x03(\x0b\x32).google.api.MonitoredResource.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xca\x01\n\x19MonitoredResourceMetadata\x12.\n\rsystem_labels\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12J\n\x0buser_labels\x18\x02 
\x03(\x0b\x32\x35.google.api.MonitoredResourceMetadata.UserLabelsEntry\x1a\x31\n\x0fUserLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42v\n\x0e\x63om.google.apiB\x16MonitoredResourceProtoP\x01ZCgoogle.golang.org/genproto/googleapis/api/monitoredres;monitoredres\xa2\x02\x04GAPIb\x06proto3' diff --git a/packages/googleapis-common-protos/google/api/monitored_resource_pb2.pyi b/packages/googleapis-common-protos/google/api/monitored_resource_pb2.pyi index 2f5c8773ee1a..17b13e507bb7 100644 --- a/packages/googleapis-common-protos/google/api/monitored_resource_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/monitored_resource_pb2.pyi @@ -18,13 +18,14 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union -from google.api import label_pb2 as _label_pb2 -from google.api import launch_stage_pb2 as _launch_stage_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import struct_pb2 as _struct_pb2 from google.protobuf.internal import containers as _containers +from google.api import label_pb2 as _label_pb2 +from google.api import launch_stage_pb2 as _launch_stage_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class MonitoredResourceDescriptor(_message.Message): diff --git a/packages/googleapis-common-protos/google/api/service_pb2.py b/packages/googleapis-common-protos/google/api/service_pb2.py index e5d6882093e5..cc6f719d263e 100644 --- a/packages/googleapis-common-protos/google/api/service_pb2.py +++ b/packages/googleapis-common-protos/google/api/service_pb2.py @@ -29,6 +29,10 @@ _sym_db = _symbol_database.Default() +from google.protobuf import api_pb2 as google_dot_protobuf_dot_api__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + from google.api import auth_pb2 as 
google_dot_api_dot_auth__pb2 from google.api import backend_pb2 as google_dot_api_dot_backend__pb2 from google.api import billing_pb2 as google_dot_api_dot_billing__pb2 @@ -49,9 +53,6 @@ from google.api import source_info_pb2 as google_dot_api_dot_source__info__pb2 from google.api import system_parameter_pb2 as google_dot_api_dot_system__parameter__pb2 from google.api import usage_pb2 as google_dot_api_dot_usage__pb2 -from google.protobuf import api_pb2 as google_dot_protobuf_dot_api__pb2 -from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b"\n\x18google/api/service.proto\x12\ngoogle.api\x1a\x15google/api/auth.proto\x1a\x18google/api/backend.proto\x1a\x18google/api/billing.proto\x1a\x17google/api/client.proto\x1a\x18google/api/context.proto\x1a\x18google/api/control.proto\x1a\x1egoogle/api/documentation.proto\x1a\x19google/api/endpoint.proto\x1a\x15google/api/http.proto\x1a\x14google/api/log.proto\x1a\x18google/api/logging.proto\x1a\x17google/api/metric.proto\x1a#google/api/monitored_resource.proto\x1a\x1bgoogle/api/monitoring.proto\x1a\x16google/api/quota.proto\x1a\x1cgoogle/api/source_info.proto\x1a!google/api/system_parameter.proto\x1a\x16google/api/usage.proto\x1a\x19google/protobuf/api.proto\x1a\x1agoogle/protobuf/type.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x82\x08\n\x07Service\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x1b\n\x13producer_project_id\x18\x16 \x01(\t\x12\n\n\x02id\x18! 
\x01(\t\x12\"\n\x04\x61pis\x18\x03 \x03(\x0b\x32\x14.google.protobuf.Api\x12$\n\x05types\x18\x04 \x03(\x0b\x32\x15.google.protobuf.Type\x12$\n\x05\x65nums\x18\x05 \x03(\x0b\x32\x15.google.protobuf.Enum\x12\x30\n\rdocumentation\x18\x06 \x01(\x0b\x32\x19.google.api.Documentation\x12$\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0b\x32\x13.google.api.Backend\x12\x1e\n\x04http\x18\t \x01(\x0b\x32\x10.google.api.Http\x12 \n\x05quota\x18\n \x01(\x0b\x32\x11.google.api.Quota\x12\x32\n\x0e\x61uthentication\x18\x0b \x01(\x0b\x32\x1a.google.api.Authentication\x12$\n\x07\x63ontext\x18\x0c \x01(\x0b\x32\x13.google.api.Context\x12 \n\x05usage\x18\x0f \x01(\x0b\x32\x11.google.api.Usage\x12'\n\tendpoints\x18\x12 \x03(\x0b\x32\x14.google.api.Endpoint\x12$\n\x07\x63ontrol\x18\x15 \x01(\x0b\x32\x13.google.api.Control\x12'\n\x04logs\x18\x17 \x03(\x0b\x32\x19.google.api.LogDescriptor\x12-\n\x07metrics\x18\x18 \x03(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x44\n\x13monitored_resources\x18\x19 \x03(\x0b\x32'.google.api.MonitoredResourceDescriptor\x12$\n\x07\x62illing\x18\x1a \x01(\x0b\x32\x13.google.api.Billing\x12$\n\x07logging\x18\x1b \x01(\x0b\x32\x13.google.api.Logging\x12*\n\nmonitoring\x18\x1c \x01(\x0b\x32\x16.google.api.Monitoring\x12\x37\n\x11system_parameters\x18\x1d \x01(\x0b\x32\x1c.google.api.SystemParameters\x12+\n\x0bsource_info\x18% \x01(\x0b\x32\x16.google.api.SourceInfo\x12*\n\npublishing\x18- \x01(\x0b\x32\x16.google.api.Publishing\x12\x34\n\x0e\x63onfig_version\x18\x14 \x01(\x0b\x32\x1c.google.protobuf.UInt32ValueBn\n\x0e\x63om.google.apiB\x0cServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3" diff --git a/packages/googleapis-common-protos/google/api/service_pb2.pyi b/packages/googleapis-common-protos/google/api/service_pb2.pyi index 46478065c5c0..a2e79a453be8 100644 --- a/packages/googleapis-common-protos/google/api/service_pb2.pyi +++ b/packages/googleapis-common-protos/google/api/service_pb2.pyi @@ -18,6 
+18,13 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union +from google.protobuf import api_pb2 as _api_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import type_pb2 as _type_pb2 +from google.protobuf import wrappers_pb2 as _wrappers_pb2 +from google.protobuf.internal import containers as _containers + from google.api import auth_pb2 as _auth_pb2 from google.api import backend_pb2 as _backend_pb2 from google.api import billing_pb2 as _billing_pb2 @@ -36,12 +43,6 @@ from google.api import quota_pb2 as _quota_pb2 from google.api import source_info_pb2 as _source_info_pb2 from google.api import system_parameter_pb2 as _system_parameter_pb2 from google.api import usage_pb2 as _usage_pb2 -from google.protobuf import api_pb2 as _api_pb2 -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import type_pb2 as _type_pb2 -from google.protobuf import wrappers_pb2 as _wrappers_pb2 -from google.protobuf.internal import containers as _containers DESCRIPTOR: _descriptor.FileDescriptor diff --git a/packages/googleapis-common-protos/google/cloud/common_resources_pb2.pyi b/packages/googleapis-common-protos/google/cloud/common_resources_pb2.pyi index c2b5735d2e3b..524a461dc1fc 100644 --- a/packages/googleapis-common-protos/google/cloud/common_resources_pb2.pyi +++ b/packages/googleapis-common-protos/google/cloud/common_resources_pb2.pyi @@ -14,7 +14,8 @@ from typing import ClassVar as _ClassVar -from google.api import resource_pb2 as _resource_pb2 from google.protobuf import descriptor as _descriptor +from google.api import resource_pb2 as _resource_pb2 + DESCRIPTOR: _descriptor.FileDescriptor diff --git a/packages/googleapis-common-protos/google/cloud/location/locations_pb2.py b/packages/googleapis-common-protos/google/cloud/location/locations_pb2.py index 
d77b5983617c..d903a84385ad 100644 --- a/packages/googleapis-common-protos/google/cloud/location/locations_pb2.py +++ b/packages/googleapis-common-protos/google/cloud/location/locations_pb2.py @@ -29,9 +29,10 @@ _sym_db = _symbol_database.Default() +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n%google/cloud/location/locations.proto\x12\x15google.cloud.location\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto\x1a\x17google/api/client.proto"[\n\x14ListLocationsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"d\n\x15ListLocationsResponse\x12\x32\n\tlocations\x18\x01 \x03(\x0b\x32\x1f.google.cloud.location.Location\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12GetLocationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xd7\x01\n\x08Location\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x05 \x01(\t\x12;\n\x06labels\x18\x02 \x03(\x0b\x32+.google.cloud.location.Location.LabelsEntry\x12&\n\x08metadata\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x32\xa4\x03\n\tLocations\x12\xab\x01\n\rListLocations\x12+.google.cloud.location.ListLocationsRequest\x1a,.google.cloud.location.ListLocationsResponse"?\x82\xd3\xe4\x93\x02\x39\x12\x14/v1/{name=locations}Z!\x12\x1f/v1/{name=projects/*}/locations\x12\x9e\x01\n\x0bGetLocation\x12).google.cloud.location.GetLocationRequest\x1a\x1f.google.cloud.location.Location"C\x82\xd3\xe4\x93\x02=\x12\x16/v1/{name=locations/*}Z#\x12!/v1/{name=projects/*/locations/*}\x1aH\xca\x41\x14\x63loud.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBo\n\x19\x63om.google.cloud.locationB\x0eLocationsProtoP\x01Z=google.golang.org/genproto/googleapis/cloud/location;location\xf8\x01\x01\x62\x06proto3' diff --git a/packages/googleapis-common-protos/google/cloud/location/locations_pb2.pyi b/packages/googleapis-common-protos/google/cloud/location/locations_pb2.pyi index 9da2987cc707..714050027ecf 100644 --- a/packages/googleapis-common-protos/google/cloud/location/locations_pb2.pyi +++ b/packages/googleapis-common-protos/google/cloud/location/locations_pb2.pyi @@ -18,13 +18,14 @@ from typing import Mapping as _Mapping from typing import Optional as _Optional from typing import Union as _Union -from google.api import annotations_pb2 as _annotations_pb2 -from google.api import client_pb2 as _client_pb2 from google.protobuf import any_pb2 as _any_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf.internal import containers as _containers +from google.api import annotations_pb2 as _annotations_pb2 +from google.api import client_pb2 as _client_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class ListLocationsRequest(_message.Message): diff --git a/packages/grafeas/.repo-metadata.json b/packages/grafeas/.repo-metadata.json index 4afe408c22ad..1a27f0de19bb 100644 --- a/packages/grafeas/.repo-metadata.json +++ b/packages/grafeas/.repo-metadata.json @@ -1,17 +1,15 @@ { - "api_description": "An 
implementation of the Grafeas API, which stores, and enables querying and retrieval of critical metadata about all of your software artifacts.", - "api_id": "containeranalysis.googleapis.com", - "api_shortname": "containeranalysis", - "client_documentation": "https://googleapis.dev/python/grafeas/latest", - "default_version": "v1", - "distribution_name": "grafeas", - "issue_tracker": "", - "language": "python", - "library_type": "GAPIC_COMBO", - "name": "grafeas", - "name_pretty": "Grafeas", - "product_documentation": "https://grafeas.io", - "release_level": "stable", - "repo": "googleapis/google-cloud-python", - "requires_billing": false + "api_description": "An implementation of the Grafeas API, which stores, and enables querying and retrieval of critical metadata about all of your software artifacts.", + "api_id": "containeranalysis.googleapis.com", + "api_shortname": "containeranalysis", + "client_documentation": "https://googleapis.dev/python/grafeas/latest", + "default_version": "v1", + "distribution_name": "grafeas", + "language": "python", + "library_type": "GAPIC_COMBO", + "name": "grafeas", + "name_pretty": "Grafeas", + "product_documentation": "https://grafeas.io", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/grpc-google-iam-v1/.repo-metadata.json b/packages/grpc-google-iam-v1/.repo-metadata.json index ac47018a24fe..292b4de5964d 100644 --- a/packages/grpc-google-iam-v1/.repo-metadata.json +++ b/packages/grpc-google-iam-v1/.repo-metadata.json @@ -1,15 +1,16 @@ { - "api_id": "iam.googleapis.com", - "api_shortname": "iam", - "client_documentation": "https://cloud.google.com/python/docs/reference/grpc-iam/latest", - "default_version": "apiVersion", - "distribution_name": "grpc-google-iam-v1", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559761", - "language": "python", - "library_type": "GAPIC_AUTO", - "name": "grpc-iam", - "name_pretty": "Cloud Identity and 
Access Management", - "product_documentation": "https://cloud.google.com/iam/docs/", - "release_level": "stable", - "repo": "googleapis/google-cloud-python" + "api_description": "Manages access control for Google Cloud Platform resources.", + "api_id": "iam.googleapis.com", + "api_shortname": "iam", + "client_documentation": "https://cloud.google.com/python/docs/reference/grpc-iam/latest", + "default_version": "apiVersion", + "distribution_name": "grpc-google-iam-v1", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559761", + "language": "python", + "library_type": "GAPIC_AUTO", + "name": "grpc-iam", + "name_pretty": "Cloud Identity and Access Management", + "product_documentation": "https://cloud.google.com/iam/docs/", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" } \ No newline at end of file diff --git a/packages/grpc-google-iam-v1/README.rst b/packages/grpc-google-iam-v1/README.rst index 0cb4adf998d4..3a67e668e154 100644 --- a/packages/grpc-google-iam-v1/README.rst +++ b/packages/grpc-google-iam-v1/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Identity and Access Management |stable| |pypi| |versions| -`Cloud Identity and Access Management`_: +`Cloud Identity and Access Management`_: Manages access control for Google Cloud Platform resources. - `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/grpc-google-iam-v1/docs/README.rst b/packages/grpc-google-iam-v1/docs/README.rst index 0cb4adf998d4..3a67e668e154 100644 --- a/packages/grpc-google-iam-v1/docs/README.rst +++ b/packages/grpc-google-iam-v1/docs/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Identity and Access Management |stable| |pypi| |versions| -`Cloud Identity and Access Management`_: +`Cloud Identity and Access Management`_: Manages access control for Google Cloud Platform resources. 
- `Client Library Documentation`_ - `Product Documentation`_ diff --git a/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.py b/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.py index bdd73b609ebc..80f61635c098 100644 --- a/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.py +++ b/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.py @@ -33,9 +33,10 @@ from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + from google.iam.v1 import options_pb2 as google_dot_iam_dot_v1_dot_options__pb2 from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( b'\n\x1egoogle/iam/v1/iam_policy.proto\x12\rgoogle.iam.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1bgoogle/iam/v1/options.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a google/protobuf/field_mask.proto"\x8f\x01\n\x13SetIamPolicyRequest\x12\x1b\n\x08resource\x18\x01 \x01(\tB\t\xe0\x41\x02\xfa\x41\x03\n\x01*\x12*\n\x06policy\x18\x02 \x01(\x0b\x32\x15.google.iam.v1.PolicyB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"d\n\x13GetIamPolicyRequest\x12\x1b\n\x08resource\x18\x01 \x01(\tB\t\xe0\x41\x02\xfa\x41\x03\n\x01*\x12\x30\n\x07options\x18\x02 \x01(\x0b\x32\x1f.google.iam.v1.GetPolicyOptions"R\n\x19TestIamPermissionsRequest\x12\x1b\n\x08resource\x18\x01 \x01(\tB\t\xe0\x41\x02\xfa\x41\x03\n\x01*\x12\x18\n\x0bpermissions\x18\x02 \x03(\tB\x03\xe0\x41\x02"1\n\x1aTestIamPermissionsResponse\x12\x13\n\x0bpermissions\x18\x01 
\x03(\t2\xb4\x03\n\tIAMPolicy\x12t\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy")\x82\xd3\xe4\x93\x02#"\x1e/v1/{resource=**}:setIamPolicy:\x01*\x12t\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy")\x82\xd3\xe4\x93\x02#"\x1e/v1/{resource=**}:getIamPolicy:\x01*\x12\x9a\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"/\x82\xd3\xe4\x93\x02)"$/v1/{resource=**}:testIamPermissions:\x01*\x1a\x1e\xca\x41\x1biam-meta-api.googleapis.comB|\n\x11\x63om.google.iam.v1B\x0eIamPolicyProtoP\x01Z)cloud.google.com/go/iam/apiv1/iampb;iampb\xaa\x02\x13Google.Cloud.Iam.V1\xca\x02\x13Google\\Cloud\\Iam\\V1b\x06proto3' diff --git a/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.pyi b/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.pyi index 26e6b9ab8db5..8edc84055abe 100644 --- a/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.pyi +++ b/packages/grpc-google-iam-v1/google/iam/v1/iam_policy_pb2.pyi @@ -22,13 +22,14 @@ from google.api import annotations_pb2 as _annotations_pb2 from google.api import client_pb2 as _client_pb2 from google.api import field_behavior_pb2 as _field_behavior_pb2 from google.api import resource_pb2 as _resource_pb2 -from google.iam.v1 import options_pb2 as _options_pb2 -from google.iam.v1 import policy_pb2 as _policy_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import field_mask_pb2 as _field_mask_pb2 from google.protobuf import message as _message from google.protobuf.internal import containers as _containers +from google.iam.v1 import options_pb2 as _options_pb2 +from google.iam.v1 import policy_pb2 as _policy_pb2 + DESCRIPTOR: _descriptor.FileDescriptor class SetIamPolicyRequest(_message.Message): diff --git a/packages/pandas-gbq/.repo-metadata.json b/packages/pandas-gbq/.repo-metadata.json index 3b7949caa858..c41ab50bbf48 100644 --- 
a/packages/pandas-gbq/.repo-metadata.json +++ b/packages/pandas-gbq/.repo-metadata.json @@ -1,15 +1,13 @@ { - "name": "pandas-gbq", - "name_pretty": "Google BigQuery connector for pandas", - "product_documentation": "https://cloud.google.com/bigquery", + "api_id": "bigquery.googleapis.com", "client_documentation": "https://googleapis.dev/python/pandas-gbq/latest/", + "distribution_name": "pandas-gbq", "issue_tracker": "https://github.com/googleapis/python-bigquery-pandas/issues", - "release_level": "preview", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "pandas-gbq", - "api_id": "bigquery.googleapis.com", - "default_version": "", - "codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team" -} + "name": "pandas-gbq", + "name_pretty": "Google BigQuery connector for pandas", + "product_documentation": "https://cloud.google.com/bigquery", + "release_level": "preview", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/proto-plus/.repo-metadata.json b/packages/proto-plus/.repo-metadata.json index a44ac6d9fa37..cb51534a2154 100644 --- a/packages/proto-plus/.repo-metadata.json +++ b/packages/proto-plus/.repo-metadata.json @@ -1,13 +1,11 @@ { - "name": "proto-plus", - "name_pretty": "Proto Plus", "client_documentation": "https://googleapis.dev/python/proto-plus/latest", + "distribution_name": "proto-plus", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "stable", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-cloud-python", - "distribution_name": "proto-plus", - "default_version": "", - "codeowner_team": "" -} + "name": "proto-plus", + "name_pretty": "Proto Plus", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/sqlalchemy-bigquery/.repo-metadata.json 
b/packages/sqlalchemy-bigquery/.repo-metadata.json index 269dc151ff13..6246353b6cda 100644 --- a/packages/sqlalchemy-bigquery/.repo-metadata.json +++ b/packages/sqlalchemy-bigquery/.repo-metadata.json @@ -1,13 +1,11 @@ { - "name": "sqlalchemy-bigquery", - "name_pretty": "SQLAlchemy dialect for BigQuery", + "api_id": "bigquery.googleapis.com", "client_documentation": "https://googleapis.dev/python/sqlalchemy-bigquery/latest/index.html", - "release_level": "preview", + "distribution_name": "sqlalchemy-bigquery", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "sqlalchemy-bigquery", - "api_id": "bigquery.googleapis.com", - "default_version": "", - "codeowner_team": "@googleapis/python-core-client-libraries" -} + "name": "sqlalchemy-bigquery", + "name_pretty": "SQLAlchemy dialect for BigQuery", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/sqlalchemy-bigquery/docs/README.rst b/packages/sqlalchemy-bigquery/docs/README.rst deleted file mode 120000 index 89a0106941ff..000000000000 --- a/packages/sqlalchemy-bigquery/docs/README.rst +++ /dev/null @@ -1 +0,0 @@ -../README.rst \ No newline at end of file diff --git a/packages/sqlalchemy-bigquery/docs/README.rst b/packages/sqlalchemy-bigquery/docs/README.rst new file mode 100644 index 000000000000..be964f0c251c --- /dev/null +++ b/packages/sqlalchemy-bigquery/docs/README.rst @@ -0,0 +1,363 @@ +SQLAlchemy Dialect for BigQuery +=============================== + +|GA| |pypi| |versions| + +`SQLALchemy Dialects`_ + +- `Dialect Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/sqlalchemy-bigquery.svg + :target: https://pypi.org/project/sqlalchemy-bigquery/ +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/sqlalchemy-bigquery.svg + :target: https://pypi.org/project/sqlalchemy-bigquery/ +.. _SQLAlchemy Dialects: https://docs.sqlalchemy.org/en/14/dialects/ +.. _Dialect Documentation: https://googleapis.dev/python/sqlalchemy-bigquery/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/ + + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. [Optional] `Enable billing for your project.`_ +3. `Enable the BigQuery Storage API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigQuery Storage API.: https://console.cloud.google.com/apis/library/bigquery.googleapis.com +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + + +Installation +------------ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.9, <3.14 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.7. + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install sqlalchemy-bigquery + + +Windows +^^^^^^^ + +.. 
code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install sqlalchemy-bigquery + + +Installations when processing large datasets +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +When handling large datasets, you may see speed increases by also installing the +`bqstorage` dependencies. See the instructions above about creating a virtual +environment and then install `sqlalchemy-bigquery` using the `bqstorage` extras: + +.. code-block:: console + + source /bin/activate + /bin/pip install sqlalchemy-bigquery[bqstorage] + + +Usage +----- + +SQLAlchemy +^^^^^^^^^^ + +.. code-block:: python + + from sqlalchemy import * + from sqlalchemy.engine import create_engine + from sqlalchemy.schema import * + engine = create_engine('bigquery://project') + table = Table('dataset.table', MetaData(bind=engine), autoload=True) + print(select([func.count('*')], from_obj=table().scalar())) + + +Project +^^^^^^^ + +``project`` in ``bigquery://project`` is used to instantiate BigQuery client with the specific project ID. To infer project from the environment, use ``bigquery://`` – without ``project`` + +Authentication +^^^^^^^^^^^^^^ + +Follow the `Google Cloud library guide `_ for authentication. + +Alternatively, you can choose either of the following approaches: + +* provide the path to a service account JSON file in ``create_engine()`` using the ``credentials_path`` parameter: + +.. code-block:: python + + # provide the path to a service account JSON file + engine = create_engine('bigquery://', credentials_path='/path/to/keyfile.json') + +* pass the credentials in ``create_engine()`` as a Python dictionary using the ``credentials_info`` parameter: + +.. 
code-block:: python + + # provide credentials as a Python dictionary + credentials_info = { + "type": "service_account", + "project_id": "your-service-account-project-id" + } + engine = create_engine('bigquery://', credentials_info=credentials_info) + +Location +^^^^^^^^ + +To specify location of your datasets pass ``location`` to ``create_engine()``: + +.. code-block:: python + + engine = create_engine('bigquery://project', location="asia-northeast1") + + +Table names +^^^^^^^^^^^ + +To query tables from non-default projects or datasets, use the following format for the SQLAlchemy schema name: ``[project.]dataset``, e.g.: + +.. code-block:: python + + # If neither dataset nor project are the default + sample_table_1 = Table('natality', schema='bigquery-public-data.samples') + # If just dataset is not the default + sample_table_2 = Table('natality', schema='bigquery-public-data') + +Batch size +^^^^^^^^^^ + +By default, ``arraysize`` is set to ``5000``. ``arraysize`` is used to set the batch size for fetching results. To change it, pass ``arraysize`` to ``create_engine()``: + +.. code-block:: python + + engine = create_engine('bigquery://project', arraysize=1000) + +Page size for dataset.list_tables +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +By default, ``list_tables_page_size`` is set to ``1000``. ``list_tables_page_size`` is used to set the max_results for `dataset.list_tables`_ operation. To change it, pass ``list_tables_page_size`` to ``create_engine()``: + +.. _`dataset.list_tables`: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list +.. code-block:: python + + engine = create_engine('bigquery://project', list_tables_page_size=100) + +Adding a Default Dataset +^^^^^^^^^^^^^^^^^^^^^^^^ + +If you want to have the ``Client`` use a default dataset, specify it as the "database" portion of the connection string. + +.. 
code-block:: python + + engine = create_engine('bigquery://project/dataset') + +When using a default dataset, don't include the dataset name in the table name, e.g.: + +.. code-block:: python + + table = Table('table_name') + +Note that specifying a default dataset doesn't restrict execution of queries to that particular dataset when using raw queries, e.g.: + +.. code-block:: python + + # Set default dataset to dataset_a + engine = create_engine('bigquery://project/dataset_a') + + # This will still execute and return rows from dataset_b + engine.execute('SELECT * FROM dataset_b.table').fetchall() + + +Connection String Parameters +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are many situations where you can't call ``create_engine`` directly, such as when using tools like `Flask SQLAlchemy `_. For situations like these, or for situations where you want the ``Client`` to have a `default_query_job_config `_, you can pass many arguments in the query of the connection string. + +The ``credentials_path``, ``credentials_info``, ``credentials_base64``, ``location``, ``arraysize`` and ``list_tables_page_size`` parameters are used by this library, and the rest are used to create a `QueryJobConfig `_ + +Note that if you want to use query strings, it will be more reliable if you use three slashes, so ``'bigquery:///?a=b'`` will work reliably, but ``'bigquery://?a=b'`` might be interpreted as having a "database" of ``?a=b``, depending on the system being used to parse the connection string. + +Here are examples of all the supported arguments. Any not present are either for legacy sql (which isn't supported by this library), or are too complex and are not implemented. + +.. code-block:: python + + engine = create_engine( + 'bigquery://some-project/some-dataset' '?' 
+ 'credentials_path=/some/path/to.json' '&' + 'location=some-location' '&' + 'arraysize=1000' '&' + 'list_tables_page_size=100' '&' + 'clustering_fields=a,b,c' '&' + 'create_disposition=CREATE_IF_NEEDED' '&' + 'destination=different-project.different-dataset.table' '&' + 'destination_encryption_configuration=some-configuration' '&' + 'dry_run=true' '&' + 'labels=a:b,c:d' '&' + 'maximum_bytes_billed=1000' '&' + 'priority=INTERACTIVE' '&' + 'schema_update_options=ALLOW_FIELD_ADDITION,ALLOW_FIELD_RELAXATION' '&' + 'use_query_cache=true' '&' + 'write_disposition=WRITE_APPEND' + ) + +In cases where you wish to include the full credentials in the connection URI you can base64 the credentials JSON file and supply the encoded string to the ``credentials_base64`` parameter. + +.. code-block:: python + + engine = create_engine( + 'bigquery://some-project/some-dataset' '?' + 'credentials_base64=eyJrZXkiOiJ2YWx1ZSJ9Cg==' '&' + 'location=some-location' '&' + 'arraysize=1000' '&' + 'list_tables_page_size=100' '&' + 'clustering_fields=a,b,c' '&' + 'create_disposition=CREATE_IF_NEEDED' '&' + 'destination=different-project.different-dataset.table' '&' + 'destination_encryption_configuration=some-configuration' '&' + 'dry_run=true' '&' + 'labels=a:b,c:d' '&' + 'maximum_bytes_billed=1000' '&' + 'priority=INTERACTIVE' '&' + 'schema_update_options=ALLOW_FIELD_ADDITION,ALLOW_FIELD_RELAXATION' '&' + 'use_query_cache=true' '&' + 'write_disposition=WRITE_APPEND' + ) + +To create the base64 encoded string you can use the command line tool ``base64``, or ``openssl base64``, or ``python -m base64``. + +Alternatively, you can use an online generator like `www.base64encode.org _` to paste your credentials JSON file to be encoded. + + +Supplying Your Own BigQuery Client +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The above connection string parameters allow you to influence how the BigQuery client used to execute your queries will be instantiated. 
+If you need additional control, you can supply a BigQuery client of your own: + +.. code-block:: python + + from google.cloud import bigquery + + custom_bq_client = bigquery.Client(...) + + engine = create_engine( + 'bigquery://some-project/some-dataset?user_supplied_client=True', + connect_args={'client': custom_bq_client}, + ) + + +Creating tables +^^^^^^^^^^^^^^^ + +To add metadata to a table: + +.. code-block:: python + + table = Table('mytable', ..., + bigquery_description='my table description', + bigquery_friendly_name='my table friendly name', + bigquery_default_rounding_mode="ROUND_HALF_EVEN", + bigquery_expiration_timestamp=datetime.datetime.fromisoformat("2038-01-01T00:00:00+00:00"), + ) + +To add metadata to a column: + +.. code-block:: python + + Column('mycolumn', doc='my column description') + +To create a clustered table: + +.. code-block:: python + + table = Table('mytable', ..., bigquery_clustering_fields=["a", "b", "c"]) + +To create a time-unit column-partitioned table: + +.. code-block:: python + + from google.cloud import bigquery + + table = Table('mytable', ..., + bigquery_time_partitioning=bigquery.TimePartitioning( + field="mytimestamp", + type_="MONTH", + expiration_ms=1000 * 60 * 60 * 24 * 30 * 6, # 6 months + ), + bigquery_require_partition_filter=True, + ) + +To create an ingestion-time partitioned table: + +.. code-block:: python + + from google.cloud import bigquery + + table = Table('mytable', ..., + bigquery_time_partitioning=bigquery.TimePartitioning(), + bigquery_require_partition_filter=True, + ) + +To create an integer-range partitioned table + +.. 
code-block:: python + + from google.cloud import bigquery + + table = Table('mytable', ..., + bigquery_range_partitioning=bigquery.RangePartitioning( + field="zipcode", + range_=bigquery.PartitionRange(start=0, end=100000, interval=10), + ), + bigquery_require_partition_filter=True, + ) + + +Threading and Multiprocessing +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Because this client uses the `grpc` library, it's safe to +share instances across threads. + +In multiprocessing scenarios, the best +practice is to create client instances *after* the invocation of +`os.fork` by `multiprocessing.pool.Pool` or +`multiprocessing.Process`. diff --git a/packages/sqlalchemy-spanner/.repo-metadata.json b/packages/sqlalchemy-spanner/.repo-metadata.json index 2ee9a72eda79..cea87992dc53 100644 --- a/packages/sqlalchemy-spanner/.repo-metadata.json +++ b/packages/sqlalchemy-spanner/.repo-metadata.json @@ -1,14 +1,13 @@ { - "name": "sqlalchemy-spanner", - "name_pretty": "Spanner dialect for SQLAlchemy", - "product_documentation": "https://cloud.google.com/spanner/docs", + "api_shortname": "sqlalchemy-spanner", "client_documentation": "https://github.com/googleapis/python-spanner-sqlalchemy", + "distribution_name": "sqlalchemy-spanner", "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open", - "release_level": "stable", "language": "python", "library_type": "INTEGRATION", - "repo": "googleapis/google-cloud-python", - "distribution_name": "sqlalchemy-spanner", - "requires_billing": true, - "api_shortname": "sqlalchemy-spanner" -} + "name": "sqlalchemy-spanner", + "name_pretty": "Spanner dialect for SQLAlchemy", + "product_documentation": "https://cloud.google.com/spanner/docs", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file