From 3cf72aaf27113dcc2b1fccdea2a1478e62691b90 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 19:15:38 +0000 Subject: [PATCH 1/4] chore(librarian): rename owlbot.py to librarian.py --- .librarian/generator-input/{owlbot.py => librarian.py} | 0 owlbot.py => librarian.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename .librarian/generator-input/{owlbot.py => librarian.py} (100%) rename owlbot.py => librarian.py (100%) diff --git a/.librarian/generator-input/owlbot.py b/.librarian/generator-input/librarian.py similarity index 100% rename from .librarian/generator-input/owlbot.py rename to .librarian/generator-input/librarian.py diff --git a/owlbot.py b/librarian.py similarity index 100% rename from owlbot.py rename to librarian.py From 36438bc0ed9b62a62e368492ec998e5b5c6cd547 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 19:18:50 +0000 Subject: [PATCH 2/4] feat: update image to us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 --- .librarian/state.yaml | 2 +- .../firestore_admin_v1/services/__init__.py | 3 - .../services/firestore_admin/__init__.py | 6 +- .../services/firestore_admin/async_client.py | 31 +- .../services/firestore_admin/client.py | 28 +- .../services/firestore_admin/pagers.py | 19 +- .../firestore_admin/transports/README.rst | 3 - .../firestore_admin/transports/__init__.py | 8 +- .../firestore_admin/transports/base.py | 32 +- .../firestore_admin/transports/grpc.py | 34 +- .../transports/grpc_asyncio.py | 38 +- .../firestore_admin/transports/rest.py | 47 +- .../firestore_admin/transports/rest_base.py | 32 +- .../firestore_admin_v1/types/__init__.py | 40 +- .../cloud/firestore_admin_v1/types/backup.py | 8 +- .../firestore_admin_v1/types/database.py | 8 +- .../cloud/firestore_admin_v1/types/field.py | 5 +- .../types/firestore_admin.py | 12 +- .../cloud/firestore_admin_v1/types/index.py | 5 +- .../firestore_admin_v1/types/location.py | 5 +- .../firestore_admin_v1/types/operation.py | 7 +- .../firestore_admin_v1/types/schedule.py | 8 +- .../firestore_admin_v1/types/snapshot.py | 8 +- .../firestore_admin_v1/types/user_creds.py | 8 +- google/cloud/firestore_bundle/__init__.py | 104 +++ .../cloud/firestore_v1/services/__init__.py | 3 - .../services/firestore/__init__.py | 6 +- .../services/firestore/async_client.py | 148 +--- .../firestore_v1/services/firestore/client.py | 141 +-- .../firestore_v1/services/firestore/pagers.py | 19 +- .../services/firestore/transports/README.rst | 3 - .../services/firestore/transports/__init__.py | 8 +- .../services/firestore/transports/base.py | 44 +- .../services/firestore/transports/grpc.py | 47 +- .../firestore/transports/grpc_asyncio.py | 68 +- .../services/firestore/transports/rest.py | 252 +----- .../firestore/transports/rest_base.py | 75 +- google/cloud/firestore_v1/types/__init__.py | 49 +- .../firestore_v1/types/aggregation_result.py | 5 +- .../cloud/firestore_v1/types/bloom_filter.py | 5 +- google/cloud/firestore_v1/types/common.py | 8 +- google/cloud/firestore_v1/types/document.py | 173 +--- .../cloud/firestore_v1/types/explain_stats.py | 57 -- google/cloud/firestore_v1/types/firestore.py | 172 +--- google/cloud/firestore_v1/types/pipeline.py | 64 -- google/cloud/firestore_v1/types/query.py | 13 +- .../cloud/firestore_v1/types/query_profile.py | 8 +- google/cloud/firestore_v1/types/write.py | 10 +- noxfile.py | 4 + scripts/fixup_firestore_admin_v1_keywords.py | 
213 ----- scripts/fixup_firestore_v1_keywords.py | 197 ----- setup.py | 4 + tests/unit/gapic/__init__.py | 4 - tests/unit/gapic/bundle/__init__.py | 4 - .../unit/gapic/firestore_admin_v1/__init__.py | 4 - .../test_firestore_admin.py | 66 +- tests/unit/gapic/firestore_v1/__init__.py | 4 - .../unit/gapic/firestore_v1/test_firestore.py | 816 ++---------------- 58 files changed, 540 insertions(+), 2655 deletions(-) delete mode 100644 google/cloud/firestore_v1/types/explain_stats.py delete mode 100644 google/cloud/firestore_v1/types/pipeline.py delete mode 100644 scripts/fixup_firestore_admin_v1_keywords.py delete mode 100644 scripts/fixup_firestore_v1_keywords.py diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 90c5eea66..06f3fc76e 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -1,4 +1,4 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:eec191fc4904c204cd717c79812cd66997b5559776483ee223f69c8f43e99224 +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 libraries: - id: google-cloud-firestore version: 2.21.0 diff --git a/google/cloud/firestore_admin_v1/services/__init__.py b/google/cloud/firestore_admin_v1/services/__init__.py index 830ba9600..cbf94b283 100644 --- a/google/cloud/firestore_admin_v1/services/__init__.py +++ b/google/cloud/firestore_admin_v1/services/__init__.py @@ -13,6 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py index fd0a3b050..41b9d63a9 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/__init__.py @@ -13,12 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from .async_client import FirestoreAdminAsyncClient from .client import FirestoreAdminClient +from .async_client import FirestoreAdminAsyncClient __all__ = ( "FirestoreAdminClient", diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py index 8952481f5..a2800e34e 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/async_client.py @@ -13,16 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from collections import OrderedDict import logging as std_logging +from collections import OrderedDict import re from typing import ( - Callable, Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -33,15 +29,16 @@ Union, ) +from google.cloud.firestore_admin_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.firestore_admin_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -50,13 +47,6 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import backup from google.cloud.firestore_admin_v1.types import database @@ -70,10 +60,15 @@ from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds - -from .client import FirestoreAdminClient -from .transports.base import DEFAULT_CLIENT_INFO, FirestoreAdminTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport +from .client import FirestoreAdminClient try: from google.api_core import client_logging # type: ignore diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py index 15d093ab0..b55c157cf 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/client.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/client.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from collections import OrderedDict from http import HTTPStatus import json @@ -24,8 +20,8 @@ import os import re from typing import ( - Callable, Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -38,19 +34,19 @@ ) import warnings +from google.cloud.firestore_admin_v1 import gapic_version as package_version + from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.firestore_admin_v1 import gapic_version as package_version - try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -67,13 +63,6 @@ from google.api_core import operation as gac_operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - from google.cloud.firestore_admin_v1.services.firestore_admin import pagers from google.cloud.firestore_admin_v1.types import backup from google.cloud.firestore_admin_v1.types import database @@ -87,8 +76,13 @@ from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds - -from .transports.base import DEFAULT_CLIENT_INFO, FirestoreAdminTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .transports.grpc import FirestoreAdminGrpcTransport from .transports.grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport from .transports.rest import FirestoreAdminRestTransport diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py index 6df96903f..ee8737c19 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/pagers.py @@ -13,26 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, Awaitable, Callable, - Iterator, - Optional, Sequence, Tuple, + Optional, + Iterator, Union, ) -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[ @@ -42,7 +37,9 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.firestore_admin_v1.types import field, firestore_admin, index +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index class ListIndexesPager: diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst index b072243bd..ffcad7a89 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/README.rst @@ -1,6 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - transport inheritance structure _______________________________ diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py index 551488d65..36eaee23d 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/__init__.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from collections import OrderedDict from typing import Dict, Type from .base import FirestoreAdminTransport from .grpc import FirestoreAdminGrpcTransport from .grpc_asyncio import FirestoreAdminGrpcAsyncIOTransport -from .rest import FirestoreAdminRestInterceptor, FirestoreAdminRestTransport +from .rest import FirestoreAdminRestTransport +from .rest import FirestoreAdminRestInterceptor + # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreAdminTransport]] diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py index dcf09159f..7d582d9b5 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/base.py @@ -13,36 +13,32 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +from google.cloud.firestore_admin_v1 import gapic_version as package_version + +import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, operations_v1 +from google.api_core import gapic_v1 from google.api_core import retry as retries -import google.auth # type: ignore +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.protobuf import empty_pb2 # type: ignore -from google.cloud.firestore_admin_v1 import gapic_version as package_version -from google.cloud.firestore_admin_v1.types import ( - backup, - database, - field, - firestore_admin, - index, - schedule, -) +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py index 6f4ca7f70..f6531a190 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc.py @@ -13,40 +13,36 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import json import logging as std_logging import pickle -from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message + import grpc # type: ignore import proto # type: ignore -from google.cloud.firestore_admin_v1.types import ( - backup, - database, - field, - firestore_admin, - index, - schedule, -) +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds - -from .base import DEFAULT_CLIENT_INFO, FirestoreAdminTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py index bb22cb2a4..117707853 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/grpc_asyncio.py @@ -13,43 +13,39 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import inspect import json -import logging as std_logging import pickle -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import logging as std_logging import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message + import grpc # type: ignore -from grpc.experimental import aio # type: ignore import proto # type: ignore +from grpc.experimental import aio # type: ignore -from google.cloud.firestore_admin_v1.types import ( - backup, - database, - field, - firestore_admin, - index, - schedule, -) +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds - -from .base import DEFAULT_CLIENT_INFO, FirestoreAdminTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreAdminGrpcTransport try: diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py index c8f1ecd5d..41e819c87 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest.py @@ -13,41 +13,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -import dataclasses -import json # type: ignore import logging -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings +import json # type: ignore -from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 import google.protobuf -from google.protobuf import empty_pb2 # type: ignore + from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings -from google.cloud.firestore_admin_v1.types import ( - backup, - database, - field, - firestore_admin, - index, - schedule, -) + +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .rest_base import _BaseFirestoreAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] diff --git a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py index ae507e331..56b6ce93f 100644 --- a/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py +++ b/google/cloud/firestore_admin_v1/services/firestore_admin/transports/rest_base.py @@ -13,32 +13,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import FirestoreAdminTransport, DEFAULT_CLIENT_INFO -import json # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1, path_template -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import json_format -from google.cloud.firestore_admin_v1.types import ( - backup, - database, - field, - firestore_admin, - index, - schedule, -) +from google.cloud.firestore_admin_v1.types import backup +from google.cloud.firestore_admin_v1.types import database +from google.cloud.firestore_admin_v1.types import field +from google.cloud.firestore_admin_v1.types import firestore_admin +from google.cloud.firestore_admin_v1.types import index +from google.cloud.firestore_admin_v1.types import schedule from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds - -from .base import DEFAULT_CLIENT_INFO, FirestoreAdminTransport +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore class _BaseFirestoreAdminRestTransport(FirestoreAdminTransport): diff --git a/google/cloud/firestore_admin_v1/types/__init__.py b/google/cloud/firestore_admin_v1/types/__init__.py index 11d2793ac..c76372e5d 100644 --- a/google/cloud/firestore_admin_v1/types/__init__.py +++ b/google/cloud/firestore_admin_v1/types/__init__.py @@ -13,13 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from .backup import Backup -from .database import Database -from .field import Field +from .backup import ( + Backup, +) +from .database import ( + Database, +) +from .field import ( + Field, +) from .firestore_admin import ( BulkDeleteDocumentsRequest, BulkDeleteDocumentsResponse, @@ -64,8 +66,12 @@ UpdateDatabaseRequest, UpdateFieldRequest, ) -from .index import Index -from .location import LocationMetadata +from .index import ( + Index, +) +from .location import ( + LocationMetadata, +) from .operation import ( BulkDeleteDocumentsMetadata, CloneDatabaseMetadata, @@ -74,13 +80,21 @@ FieldOperationMetadata, ImportDocumentsMetadata, IndexOperationMetadata, - OperationState, Progress, RestoreDatabaseMetadata, + OperationState, +) +from .schedule import ( + BackupSchedule, + DailyRecurrence, + WeeklyRecurrence, +) +from .snapshot import ( + PitrSnapshot, +) +from .user_creds import ( + UserCreds, ) -from .schedule import BackupSchedule, DailyRecurrence, WeeklyRecurrence -from .snapshot import PitrSnapshot -from .user_creds import UserCreds __all__ = ( "Backup", diff --git a/google/cloud/firestore_admin_v1/types/backup.py b/google/cloud/firestore_admin_v1/types/backup.py index 40b706bb7..02c594a22 100644 --- a/google/cloud/firestore_admin_v1/types/backup.py +++ b/google/cloud/firestore_admin_v1/types/backup.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + __protobuf__ = proto.module( package="google.firestore.admin.v1", manifest={ diff --git a/google/cloud/firestore_admin_v1/types/database.py b/google/cloud/firestore_admin_v1/types/database.py index 5a65c40b0..f46bede62 100644 --- a/google/cloud/firestore_admin_v1/types/database.py +++ b/google/cloud/firestore_admin_v1/types/database.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence +import proto # type: ignore + from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", diff --git a/google/cloud/firestore_admin_v1/types/field.py b/google/cloud/firestore_admin_v1/types/field.py index d6f60a48d..824a9c87f 100644 --- a/google/cloud/firestore_admin_v1/types/field.py +++ b/google/cloud/firestore_admin_v1/types/field.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence @@ -25,6 +21,7 @@ from google.cloud.firestore_admin_v1.types import index + __protobuf__ = proto.module( package="google.firestore.admin.v1", manifest={ diff --git a/google/cloud/firestore_admin_v1/types/firestore_admin.py b/google/cloud/firestore_admin_v1/types/firestore_admin.py index ffe8268cc..9ede35cac 100644 --- a/google/cloud/firestore_admin_v1/types/firestore_admin.py +++ b/google/cloud/firestore_admin_v1/types/firestore_admin.py @@ -13,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.firestore_admin_v1.types import backup as gfa_backup from google.cloud.firestore_admin_v1.types import database as gfa_database from google.cloud.firestore_admin_v1.types import field as gfa_field from google.cloud.firestore_admin_v1.types import index as gfa_index -from google.cloud.firestore_admin_v1.types import schedule, snapshot +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.firestore_admin_v1.types import snapshot from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", diff --git a/google/cloud/firestore_admin_v1/types/index.py b/google/cloud/firestore_admin_v1/types/index.py index ad789ee50..02f9f2647 100644 --- a/google/cloud/firestore_admin_v1/types/index.py +++ b/google/cloud/firestore_admin_v1/types/index.py @@ -13,16 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", manifest={ diff --git a/google/cloud/firestore_admin_v1/types/location.py b/google/cloud/firestore_admin_v1/types/location.py index 0c8f65cb1..94ec17639 100644 --- a/google/cloud/firestore_admin_v1/types/location.py +++ b/google/cloud/firestore_admin_v1/types/location.py @@ -13,16 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", manifest={ diff --git a/google/cloud/firestore_admin_v1/types/operation.py b/google/cloud/firestore_admin_v1/types/operation.py index f0d3e32c3..c50455693 100644 --- a/google/cloud/firestore_admin_v1/types/operation.py +++ b/google/cloud/firestore_admin_v1/types/operation.py @@ -13,19 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.firestore_admin_v1.types import index as gfa_index from google.cloud.firestore_admin_v1.types import snapshot +from google.protobuf import timestamp_pb2 # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", diff --git a/google/cloud/firestore_admin_v1/types/schedule.py b/google/cloud/firestore_admin_v1/types/schedule.py index f773e1c16..a767edfe1 100644 --- a/google/cloud/firestore_admin_v1/types/schedule.py +++ b/google/cloud/firestore_admin_v1/types/schedule.py @@ -13,18 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence +import proto # type: ignore + from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore -import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.admin.v1", diff --git a/google/cloud/firestore_admin_v1/types/snapshot.py b/google/cloud/firestore_admin_v1/types/snapshot.py index d38fe0e55..e56a125f5 100644 --- a/google/cloud/firestore_admin_v1/types/snapshot.py +++ b/google/cloud/firestore_admin_v1/types/snapshot.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + __protobuf__ = proto.module( package="google.firestore.admin.v1", manifest={ diff --git a/google/cloud/firestore_admin_v1/types/user_creds.py b/google/cloud/firestore_admin_v1/types/user_creds.py index 0168cce0d..39bd11947 100644 --- a/google/cloud/firestore_admin_v1/types/user_creds.py +++ b/google/cloud/firestore_admin_v1/types/user_creds.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
#
-
-# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input`
-# The source of truth for this file is `.librarian/generator-input`
-
from __future__ import annotations
from typing import MutableMapping, MutableSequence
-from google.protobuf import timestamp_pb2 # type: ignore
import proto # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+
+
__protobuf__ = proto.module(
package="google.firestore.admin.v1",
manifest={
diff --git a/google/cloud/firestore_bundle/__init__.py b/google/cloud/firestore_bundle/__init__.py
index 1b6469437..30faafe58 100644
--- a/google/cloud/firestore_bundle/__init__.py
+++ b/google/cloud/firestore_bundle/__init__.py
@@ -15,8 +15,18 @@
#
from google.cloud.firestore_bundle import gapic_version as package_version
+import google.api_core as api_core
+import sys
+
__version__ = package_version.__version__
+if sys.version_info >= (3, 8): # pragma: NO COVER
+ from importlib import metadata
+else: # pragma: NO COVER
+ # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove
+ # this code path once we drop support for Python 3.7
+ import importlib_metadata as metadata
+
from .types.bundle import BundledDocumentMetadata
from .types.bundle import BundledQuery
@@ -26,6 +36,100 @@
from .bundle import FirestoreBundle
+if hasattr(api_core, "check_python_version") and hasattr(
+ api_core, "check_dependency_versions"
+): # pragma: NO COVER
+ api_core.check_python_version("google.cloud.bundle") # type: ignore
+ api_core.check_dependency_versions("google.cloud.bundle") # type: ignore
+else: # pragma: NO COVER
+ # An older version of api_core is installed which does not define the
+ # functions above. We do equivalent checks manually.
+ try:
+ import warnings
+ import sys
+
+ _py_version_str = sys.version.split()[0]
+ _package_label = "google.cloud.bundle"
+ if sys.version_info < (3, 9):
+ warnings.warn(
+ "You are using a non-supported Python version "
+ + f"({_py_version_str}). Google will not post any further "
+ + f"updates to {_package_label} supporting this Python version. "
+ + "Please upgrade to the latest Python version, or at "
+ + f"least to Python 3.9, and then update {_package_label}.",
+ FutureWarning,
+ )
+ if sys.version_info[:2] == (3, 9):
+ warnings.warn(
+ f"You are using a Python version ({_py_version_str}) "
+ + f"which Google will stop supporting in {_package_label} in "
+ + "January 2026. Please "
+ + "upgrade to the latest Python version, or at "
+ + "least to Python 3.10, before then, and "
+ + f"then update {_package_label}.",
+ FutureWarning,
+ )
+
+ def parse_version_to_tuple(version_string: str):
+ """Safely converts a semantic version string to a comparable tuple of integers.
+ Example: "4.25.8" -> (4, 25, 8)
+ Ignores non-numeric parts and handles common version formats.
+ Args:
+ version_string: Version string in the format "x.y.z" or "x.y.z<suffix>"
+ Returns:
+ Tuple of integers for the parsed version string.
+ """
+ parts = []
+ for part in version_string.split("."):
+ try:
+ parts.append(int(part))
+ except ValueError:
+ # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here.
+ # This is a simplification compared to 'packaging.parse_version', but sufficient
+ # for comparing strictly numeric semantic versions.
+ break
+ return tuple(parts)
+
+ def _get_version(dependency_name):
+ try:
+ version_string: str = metadata.version(dependency_name)
+ parsed_version = parse_version_to_tuple(version_string)
+ return (parsed_version, version_string)
+ except Exception:
+ # Catch exceptions from metadata.version() (e.g., PackageNotFoundError)
+ # or errors during parse_version_to_tuple
+ return (None, "--")
+
+ _dependency_package = "google.protobuf"
+ _next_supported_version = "4.25.8"
+ _next_supported_version_tuple = (4, 25, 8)
+ _recommendation = " (we recommend 6.x)"
+ (_version_used, _version_used_string) = _get_version(_dependency_package)
+ if _version_used and _version_used < _next_supported_version_tuple:
+ warnings.warn(
+ f"Package {_package_label} depends on "
+ + f"{_dependency_package}, currently installed at version "
+ + f"{_version_used_string}. Future updates to "
+ + f"{_package_label} will require {_dependency_package} at "
+ + f"version {_next_supported_version} or higher{_recommendation}."
+ + " Please ensure "
+ + "that either (a) your Python environment doesn't pin the "
+ + f"version of {_dependency_package}, so that updates to "
+ + f"{_package_label} can require the higher version, or "
+ + "(b) you manually update your Python environment to use at "
+ + f"least version {_next_supported_version} of "
+ + f"{_dependency_package}.",
+ FutureWarning,
+ )
+ except Exception:
+ warnings.warn(
+ "Could not determine the version of Python "
+ + "currently being used. To continue receiving "
+ + f"updates for {_package_label}, ensure you are "
+ + "using a supported version of Python; see "
+ + "https://devguide.python.org/versions/"
+ )
+
__all__ = (
"BundleElement",
"BundleMetadata",
diff --git a/google/cloud/firestore_v1/services/__init__.py b/google/cloud/firestore_v1/services/__init__.py
index 830ba9600..cbf94b283 100644
--- a/google/cloud/firestore_v1/services/__init__.py
+++ b/google/cloud/firestore_v1/services/__init__.py
@@ -13,6 +13,3 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input`
-# The source of truth for this file is `.librarian/generator-input`
diff --git a/google/cloud/firestore_v1/services/firestore/__init__.py b/google/cloud/firestore_v1/services/firestore/__init__.py
index 6a2d4510a..a69a11b29 100644
--- a/google/cloud/firestore_v1/services/firestore/__init__.py
+++ b/google/cloud/firestore_v1/services/firestore/__init__.py
@@ -13,12 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input`
-# The source of truth for this file is `.librarian/generator-input`
-
-from .async_client import FirestoreAsyncClient
from .client import FirestoreClient
+from .async_client import FirestoreAsyncClient
__all__ = (
"FirestoreClient",
diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py
index 7a68a9b15..b904229b0 100644
--- a/google/cloud/firestore_v1/services/firestore/async_client.py
+++ b/google/cloud/firestore_v1/services/firestore/async_client.py
@@ -13,64 +13,57 @@
# See the License for the specific language governing permissions and
# limitations under the License.
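For context on the fallback block added to google/cloud/firestore_bundle/__init__.py above: it gates on a minimum google.protobuf version by reducing version strings to tuples of leading numeric components and comparing them lexicographically. A minimal standalone sketch of that parsing (illustrative only; the sample inputs below are assumptions, not part of this patch):

.. code-block:: python

    def parse_version_to_tuple(version_string):
        # Keep leading numeric components; stop at the first non-numeric part.
        parts = []
        for part in version_string.split("."):
            try:
                parts.append(int(part))
            except ValueError:
                break
        return tuple(parts)

    assert parse_version_to_tuple("4.25.8") == (4, 25, 8)
    # A pre-release segment is non-numeric, so parsing stops before it.
    assert parse_version_to_tuple("5.0.0rc1") == (5, 0)
    # Tuples compare element-wise, which is what the minimum-version gate relies on.
    assert parse_version_to_tuple("4.25.7") < (4, 25, 8)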
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from collections import OrderedDict import logging as std_logging +from collections import OrderedDict import re from typing import ( - AsyncIterable, - AsyncIterator, - Awaitable, - Callable, Dict, + Callable, Mapping, MutableMapping, MutableSequence, Optional, + AsyncIterable, + Awaitable, + AsyncIterator, Sequence, Tuple, Type, Union, ) +from google.cloud.firestore_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.firestore_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import ( - explain_stats, - firestore, - query, - query_profile, -) -from google.cloud.firestore_v1.types import aggregation_result, common +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile from google.cloud.firestore_v1.types import write as gf_write - -from .client import FirestoreClient -from .transports.base import DEFAULT_CLIENT_INFO, FirestoreTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport +from .client import FirestoreClient try: from google.api_core import client_logging # type: ignore @@ -1255,109 +1248,6 @@ async def sample_run_query(): # Done; return the response. return response - def execute_pipeline( - self, - request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[firestore.ExecutePipelineResponse]]: - r"""Executes a pipeline query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import firestore_v1
-
- async def sample_execute_pipeline():
- # Create a client
- client = firestore_v1.FirestoreAsyncClient()
-
- # Initialize request argument(s)
- structured_pipeline = firestore_v1.StructuredPipeline()
- structured_pipeline.pipeline.stages.name = "name_value"
-
- request = firestore_v1.ExecutePipelineRequest(
- structured_pipeline=structured_pipeline,
- transaction=b'transaction_blob',
- database="database_value",
- )
-
- # Make the request
- stream = await client.execute_pipeline(request=request)
-
- # Handle the response
- async for response in stream:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]]):
- The request object. The request for
- [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- AsyncIterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
- The response for [Firestore.Execute][].
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, firestore.ExecutePipelineRequest):
- request = firestore.ExecutePipelineRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[
- self._client._transport.execute_pipeline
- ]
-
- header_params = {}
-
- routing_param_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
- regex_match = routing_param_regex.match(request.database)
- if regex_match and regex_match.group("project_id"):
- header_params["project_id"] = regex_match.group("project_id")
-
- routing_param_regex = re.compile(
- "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
- )
- regex_match = routing_param_regex.match(request.database)
- if regex_match and regex_match.group("database_id"):
- header_params["database_id"] = regex_match.group("database_id")
-
- if header_params:
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(header_params),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
def run_aggregation_query(
self,
request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None,
diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py
index a4e77b28c..d6540c4ff 100644
--- a/google/cloud/firestore_v1/services/firestore/client.py
+++ b/google/cloud/firestore_v1/services/firestore/client.py
@@ -13,10 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from collections import OrderedDict from http import HTTPStatus import json @@ -24,14 +20,14 @@ import os import re from typing import ( - Callable, Dict, - Iterable, - Iterator, + Callable, Mapping, MutableMapping, MutableSequence, Optional, + Iterable, + Iterator, Sequence, Tuple, Type, @@ -40,19 +36,19 @@ ) import warnings +from google.cloud.firestore_v1 import gapic_version as package_version + from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.firestore_v1 import gapic_version as package_version - try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -67,24 +63,20 @@ _LOGGER = std_logging.getLogger(__name__) -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - from google.cloud.firestore_v1.services.firestore import pagers -from google.cloud.firestore_v1.types import ( - explain_stats, - firestore, - query, - query_profile, -) -from google.cloud.firestore_v1.types import aggregation_result, common +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile from google.cloud.firestore_v1.types import write as gf_write - -from .transports.base import DEFAULT_CLIENT_INFO, FirestoreTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import FirestoreTransport, DEFAULT_CLIENT_INFO from .transports.grpc import FirestoreGrpcTransport from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport from .transports.rest import FirestoreRestTransport @@ -1657,107 +1649,6 @@ def sample_run_query(): # Done; return the response. return response - def execute_pipeline( - self, - request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[firestore.ExecutePipelineResponse]: - r"""Executes a pipeline query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import firestore_v1
-
- def sample_execute_pipeline():
- # Create a client
- client = firestore_v1.FirestoreClient()
-
- # Initialize request argument(s)
- structured_pipeline = firestore_v1.StructuredPipeline()
- structured_pipeline.pipeline.stages.name = "name_value"
-
- request = firestore_v1.ExecutePipelineRequest(
- structured_pipeline=structured_pipeline,
- transaction=b'transaction_blob',
- database="database_value",
- )
-
- # Make the request
- stream = client.execute_pipeline(request=request)
-
- # Handle the response
- for response in stream:
- print(response)
-
- Args:
- request (Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]):
- The request object. The request for
- [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- Iterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
- The response for [Firestore.Execute][].
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, firestore.ExecutePipelineRequest):
- request = firestore.ExecutePipelineRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.execute_pipeline]
-
- header_params = {}
-
- routing_param_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
- regex_match = routing_param_regex.match(request.database)
- if regex_match and regex_match.group("project_id"):
- header_params["project_id"] = regex_match.group("project_id")
-
- routing_param_regex = re.compile(
- "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
- )
- regex_match = routing_param_regex.match(request.database)
- if regex_match and regex_match.group("database_id"):
- header_params["database_id"] = regex_match.group("database_id")
-
- if header_params:
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(header_params),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
def run_aggregation_query(
self,
request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None,
diff --git a/google/cloud/firestore_v1/services/firestore/pagers.py b/google/cloud/firestore_v1/services/firestore/pagers.py
index 792aa9fed..be9e4b714 100644
--- a/google/cloud/firestore_v1/services/firestore/pagers.py
+++ b/google/cloud/firestore_v1/services/firestore/pagers.py
@@ -13,26 +13,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
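Both execute_pipeline bodies removed above (async and sync) derive implicit gRPC routing headers from the request's `database` resource path before sending the RPC. A minimal standalone sketch of that extraction, with the two regexes taken from the removed code (the helper name and sample path below are assumptions for illustration):

.. code-block:: python

    import re

    def routing_headers(database):
        # Extract project_id and database_id from a path like
        # "projects/{project_id}/databases/{database_id}".
        headers = {}
        m = re.match(r"^projects/(?P<project_id>[^/]+)(?:/.*)?$", database)
        if m and m.group("project_id"):
            headers["project_id"] = m.group("project_id")
        m = re.match(r"^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$", database)
        if m and m.group("database_id"):
            headers["database_id"] = m.group("database_id")
        return headers

    assert routing_headers("projects/my-project/databases/my-db") == {
        "project_id": "my-project",
        "database_id": "my-db",
    }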
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, Awaitable, Callable, - Iterator, - Optional, Sequence, Tuple, + Optional, + Iterator, Union, ) -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[ @@ -42,7 +37,9 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.firestore_v1.types import document, firestore, query +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query class ListDocumentsPager: diff --git a/google/cloud/firestore_v1/services/firestore/transports/README.rst b/google/cloud/firestore_v1/services/firestore/transports/README.rst index 78fc6ab87..1823b6773 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/README.rst +++ b/google/cloud/firestore_v1/services/firestore/transports/README.rst @@ -1,6 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - transport inheritance structure _______________________________ diff --git a/google/cloud/firestore_v1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1/services/firestore/transports/__init__.py index 0e440d206..f3ca95f79 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/__init__.py +++ b/google/cloud/firestore_v1/services/firestore/transports/__init__.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from collections import OrderedDict from typing import Dict, Type from .base import FirestoreTransport from .grpc import FirestoreGrpcTransport from .grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .rest import FirestoreRestInterceptor, FirestoreRestTransport +from .rest import FirestoreRestTransport +from .rest import FirestoreRestInterceptor + # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index 87c70640c..02d6c0bbc 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -13,29 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
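The transports package touched above keeps a name-to-class registry so a client can resolve a transport by string. A sketch of the idiom, with the key names assumed from the classes imported in the same ``__init__`` hunk:

.. code-block:: python

    from collections import OrderedDict
    from typing import Dict, Type

    # Map a transport label to its class; resolving a transport is then
    # a plain dictionary lookup.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[FirestoreTransport]]
    _transport_registry["grpc"] = FirestoreGrpcTransport
    _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport
    _transport_registry["rest"] = FirestoreRestTransport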
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +from google.cloud.firestore_v1 import gapic_version as package_version + +import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf -from google.protobuf import empty_pb2 # type: ignore -from google.cloud.firestore_v1 import gapic_version as package_version from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -294,23 +291,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.execute_pipeline: gapic_v1.method.wrap_method( - self.execute_pipeline, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), self.run_aggregation_query: gapic_v1.method.wrap_method( self.run_aggregation_query, default_retry=retries.Retry( @@ -534,18 +514,6 @@ def run_query( ]: raise NotImplementedError() - @property - def execute_pipeline( - self, - ) -> Callable[ - [firestore.ExecutePipelineRequest], - Union[ - firestore.ExecutePipelineResponse, - Awaitable[firestore.ExecutePipelineResponse], - ], - ]: - raise NotImplementedError() - @property def run_aggregation_query( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py index 5439c94a6..3c5bded2d 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -13,33 +13,30 @@ # See the License for the specific language governing permissions and # limitations under the License. 
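The base-transport hunk above removes the default retry policy that wrapped ``execute_pipeline``. For reference, the same policy as a standalone object, with the values copied from the deleted lines:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Exponential backoff starting at 0.1s, growing x1.3 per attempt and
    # capped at 60s, with a 300s overall deadline; only the listed
    # transient errors are retried.
    execute_pipeline_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ResourceExhausted,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=300.0,
    )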
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import json import logging as std_logging import pickle -from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message + import grpc # type: ignore import proto # type: ignore from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore - -from .base import DEFAULT_CLIENT_INFO, FirestoreTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore @@ -576,34 +573,6 @@ def run_query( ) return self._stubs["run_query"] - @property - def execute_pipeline( - self, - ) -> Callable[ - [firestore.ExecutePipelineRequest], firestore.ExecutePipelineResponse - ]: - r"""Return a callable for the execute pipeline method over gRPC. - - Executes a pipeline query. - - Returns: - Callable[[~.ExecutePipelineRequest], - ~.ExecutePipelineResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "execute_pipeline" not in self._stubs: - self._stubs["execute_pipeline"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/ExecutePipeline", - request_serializer=firestore.ExecutePipelineRequest.serialize, - response_deserializer=firestore.ExecutePipelineResponse.deserialize, - ) - return self._stubs["execute_pipeline"] - @property def run_aggregation_query( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py index 6fd4b5bac..6cc93e21a 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py @@ -13,36 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
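The gRPC transport deletion above follows the transport's usual lazy stub pattern: build the server-streaming callable once, cache it by method name. Shown generically, with ``stubs`` and ``channel`` standing in for the transport's own attributes:

.. code-block:: python

    def _cached_unary_stream(stubs, channel, name, path, serializer, deserializer):
        # Create the gRPC callable on first access and reuse it afterwards.
        if name not in stubs:
            stubs[name] = channel.unary_stream(
                path,
                request_serializer=serializer,
                response_deserializer=deserializer,
            )
        return stubs[name]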
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import inspect import json -import logging as std_logging import pickle -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import logging as std_logging import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message + import grpc # type: ignore -from grpc.experimental import aio # type: ignore import proto # type: ignore +from grpc.experimental import aio # type: ignore from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore - -from .base import DEFAULT_CLIENT_INFO, FirestoreTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO from .grpc import FirestoreGrpcTransport try: @@ -592,34 +589,6 @@ def run_query( ) return self._stubs["run_query"] - @property - def execute_pipeline( - self, - ) -> Callable[ - [firestore.ExecutePipelineRequest], Awaitable[firestore.ExecutePipelineResponse] - ]: - r"""Return a callable for the execute pipeline method over gRPC. - - Executes a pipeline query. - - Returns: - Callable[[~.ExecutePipelineRequest], - Awaitable[~.ExecutePipelineResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "execute_pipeline" not in self._stubs: - self._stubs["execute_pipeline"] = self._logged_channel.unary_stream( - "/google.firestore.v1.Firestore/ExecutePipeline", - request_serializer=firestore.ExecutePipelineRequest.serialize, - response_deserializer=firestore.ExecutePipelineResponse.deserialize, - ) - return self._stubs["execute_pipeline"] - @property def run_aggregation_query( self, @@ -995,23 +964,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.execute_pipeline: self._wrap_method( - self.execute_pipeline, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), self.run_aggregation_query: self._wrap_method( self.run_aggregation_query, default_retry=retries.AsyncRetry( diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py index 73f10dcf2..a32a7e84e 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -13,34 +13,36 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -import dataclasses -import json # type: ignore import logging -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings +import json # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 import google.protobuf -from google.protobuf import empty_pb2 # type: ignore + from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .rest_base import _BaseFirestoreRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -125,14 +127,6 @@ def pre_delete_document(self, request, metadata): logging.log(f"Received request: 
{request}") return request, metadata - def pre_execute_pipeline(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_pipeline(self, response): - logging.log(f"Received response: {response}") - return response - def pre_get_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -451,56 +445,6 @@ def pre_delete_document( """ return request, metadata - def pre_execute_pipeline( - self, - request: firestore.ExecutePipelineRequest, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - firestore.ExecutePipelineRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for execute_pipeline - - Override in a subclass to manipulate the request or metadata - before they are sent to the Firestore server. - """ - return request, metadata - - def post_execute_pipeline( - self, response: rest_streaming.ResponseIterator - ) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for execute_pipeline - - DEPRECATED. Please use the `post_execute_pipeline_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Firestore server but before - it is returned to user code. This `post_execute_pipeline` interceptor runs - before the `post_execute_pipeline_with_metadata` interceptor. - """ - return response - - def post_execute_pipeline_with_metadata( - self, - response: rest_streaming.ResponseIterator, - metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Post-rpc interceptor for execute_pipeline - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Firestore server but before it is returned to user code. - - We recommend only using this `post_execute_pipeline_with_metadata` - interceptor in new development instead of the `post_execute_pipeline` interceptor. - When both interceptors are used, this `post_execute_pipeline_with_metadata` interceptor runs after the - `post_execute_pipeline` interceptor. The (possibly modified) response returned by - `post_execute_pipeline` will be passed to - `post_execute_pipeline_with_metadata`. 
- """ - return response, metadata - def pre_get_document( self, request: firestore.GetDocumentRequest, @@ -1929,158 +1873,6 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExecutePipeline( - _BaseFirestoreRestTransport._BaseExecutePipeline, FirestoreRestStub - ): - def __hash__(self): - return hash("FirestoreRestTransport.ExecutePipeline") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None, - ): - uri = transcoded_request["uri"] - method = transcoded_request["method"] - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__( - self, - request: firestore.ExecutePipelineRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> rest_streaming.ResponseIterator: - r"""Call the execute pipeline method over HTTP. - - Args: - request (~.firestore.ExecutePipelineRequest): - The request object. The request for - [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.firestore.ExecutePipelineResponse: - The response for [Firestore.Execute][]. - """ - - http_options = ( - _BaseFirestoreRestTransport._BaseExecutePipeline._get_http_options() - ) - - request, metadata = self._interceptor.pre_execute_pipeline( - request, metadata - ) - transcoded_request = _BaseFirestoreRestTransport._BaseExecutePipeline._get_transcoded_request( - http_options, request - ) - - body = ( - _BaseFirestoreRestTransport._BaseExecutePipeline._get_request_body_json( - transcoded_request - ) - ) - - # Jsonify the query params - query_params = ( - _BaseFirestoreRestTransport._BaseExecutePipeline._get_query_params_json( - transcoded_request - ) - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - request_url = "{host}{uri}".format( - host=self._host, uri=transcoded_request["uri"] - ) - method = transcoded_request["method"] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.firestore_v1.FirestoreClient.ExecutePipeline", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ExecutePipeline", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = FirestoreRestTransport._ExecutePipeline._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator( - response, firestore.ExecutePipelineResponse - ) - - resp = self._interceptor.post_execute_pipeline(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_pipeline_with_metadata( - resp, response_metadata - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( - logging.DEBUG - ): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.firestore_v1.FirestoreClient.execute_pipeline", - extra={ - "serviceName": "google.firestore.v1.Firestore", - "rpcName": "ExecutePipeline", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - class _GetDocument(_BaseFirestoreRestTransport._BaseGetDocument, FirestoreRestStub): def __hash__(self): return hash("FirestoreRestTransport.GetDocument") @@ -3351,16 +3143,6 @@ def delete_document( # In C++ this would require a dynamic_cast return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore - @property - def execute_pipeline( - self, - ) -> Callable[ - [firestore.ExecutePipelineRequest], firestore.ExecutePipelineResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecutePipeline(self._session, self._host, self._interceptor) # type: ignore - @property def get_document( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py index 856b11f48..1d95cd16e 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py @@ -13,25 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
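Against the pre-patch library, the deleted ``_ExecutePipeline`` handler shows the REST server-streaming recipe: POST with ``stream=True``, then wrap the raw HTTP response so each chunk parses into a proto message. Roughly, with ``url``, ``headers`` and ``body`` assumed to be produced by the transcoding helpers:

.. code-block:: python

    import requests
    from google.api_core import exceptions as core_exceptions
    from google.api_core import rest_streaming
    from google.cloud.firestore_v1.types import firestore

    session = requests.Session()
    raw = session.post(url, headers=headers, data=body, stream=True)
    if raw.status_code >= 400:
        raise core_exceptions.from_http_response(raw)
    for message in rest_streaming.ResponseIterator(
        raw, firestore.ExecutePipelineResponse
    ):
        print(message)  # one parsed ExecutePipelineResponse per chunk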
# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import FirestoreTransport, DEFAULT_CLIENT_INFO -import json # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1, path_template -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import json_format from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document from google.cloud.firestore_v1.types import firestore - -from .base import DEFAULT_CLIENT_INFO, FirestoreTransport +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore class _BaseFirestoreRestTransport(FirestoreTransport): @@ -428,63 +426,6 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseExecutePipeline: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{database=projects/*/databases/*}/documents:executePipeline", - "body": "*", - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = firestore.ExecutePipelineRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True - ) - return body - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update( - _BaseFirestoreRestTransport._BaseExecutePipeline._get_unset_required_fields( - query_params - ) - ) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - class _BaseGetDocument: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py index 2c15b8287..ae1004e13 100644 --- a/google/cloud/firestore_v1/types/__init__.py +++ b/google/cloud/firestore_v1/types/__init__.py @@ -13,15 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
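The deleted ``_BaseExecutePipeline`` helpers map the proto request onto the HTTP surface with ``path_template.transcode``; the ``"body": "*"`` rule routes every field not bound in the URI into the JSON body. A small sketch of that transcoding step:

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/v1/{database=projects/*/databases/*}/documents:executePipeline",
            "body": "*",
        },
    ]

    transcoded = path_template.transcode(
        http_options, database="projects/p/databases/d"
    )
    # transcoded["uri"] ==
    #     "/v1/projects/p/databases/d/documents:executePipeline"
    # transcoded["method"] == "post"; transcoded["body"] carries the
    # request fields not consumed by the URI template.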
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from .aggregation_result import AggregationResult -from .bloom_filter import BitSequence, BloomFilter -from .common import DocumentMask, Precondition, TransactionOptions -from .document import ArrayValue, Document, Function, MapValue, Pipeline, Value -from .explain_stats import ExplainStats +from .aggregation_result import ( + AggregationResult, +) +from .bloom_filter import ( + BitSequence, + BloomFilter, +) +from .common import ( + DocumentMask, + Precondition, + TransactionOptions, +) +from .document import ( + ArrayValue, + Document, + MapValue, + Value, +) from .firestore import ( BatchGetDocumentsRequest, BatchGetDocumentsResponse, @@ -33,8 +42,6 @@ CommitResponse, CreateDocumentRequest, DeleteDocumentRequest, - ExecutePipelineRequest, - ExecutePipelineResponse, GetDocumentRequest, ListCollectionIdsRequest, ListCollectionIdsResponse, @@ -55,9 +62,17 @@ WriteRequest, WriteResponse, ) -from .pipeline import StructuredPipeline -from .query import Cursor, StructuredAggregationQuery, StructuredQuery -from .query_profile import ExecutionStats, ExplainMetrics, ExplainOptions, PlanSummary +from .query import ( + Cursor, + StructuredAggregationQuery, + StructuredQuery, +) +from .query_profile import ( + ExecutionStats, + ExplainMetrics, + ExplainOptions, + PlanSummary, +) from .write import ( DocumentChange, DocumentDelete, @@ -77,11 +92,8 @@ "TransactionOptions", "ArrayValue", "Document", - "Function", "MapValue", - "Pipeline", "Value", - "ExplainStats", "BatchGetDocumentsRequest", "BatchGetDocumentsResponse", "BatchWriteRequest", @@ -92,8 +104,6 @@ "CommitResponse", "CreateDocumentRequest", "DeleteDocumentRequest", - "ExecutePipelineRequest", - "ExecutePipelineResponse", "GetDocumentRequest", "ListCollectionIdsRequest", "ListCollectionIdsResponse", @@ -113,7 +123,6 @@ "UpdateDocumentRequest", "WriteRequest", "WriteResponse", - "StructuredPipeline", "Cursor", "StructuredAggregationQuery", "StructuredQuery", diff --git a/google/cloud/firestore_v1/types/aggregation_result.py b/google/cloud/firestore_v1/types/aggregation_result.py index 39d0feb35..3c649dc8a 100644 --- a/google/cloud/firestore_v1/types/aggregation_result.py +++ b/google/cloud/firestore_v1/types/aggregation_result.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence @@ -25,6 +21,7 @@ from google.cloud.firestore_v1.types import document + __protobuf__ = proto.module( package="google.firestore.v1", manifest={ diff --git a/google/cloud/firestore_v1/types/bloom_filter.py b/google/cloud/firestore_v1/types/bloom_filter.py index f8e63548e..f38386cbe 100644 --- a/google/cloud/firestore_v1/types/bloom_filter.py +++ b/google/cloud/firestore_v1/types/bloom_filter.py @@ -13,16 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
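Each types module in these hunks follows the same proto-plus manifest pattern seen in the ``aggregation_result`` hunk above: declare the exported message names once and derive ``__all__`` from them. Schematically:

.. code-block:: python

    import proto  # proto-plus

    __protobuf__ = proto.module(
        package="google.firestore.v1",
        manifest={"AggregationResult"},
    )

    # ... message classes are defined here ...

    __all__ = tuple(sorted(__protobuf__.manifest))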
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", manifest={ diff --git a/google/cloud/firestore_v1/types/common.py b/google/cloud/firestore_v1/types/common.py index 03b77b017..01fb3d263 100644 --- a/google/cloud/firestore_v1/types/common.py +++ b/google/cloud/firestore_v1/types/common.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + __protobuf__ = proto.module( package="google.firestore.v1", manifest={ diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py index b3bb32e13..22fe79b73 100644 --- a/google/cloud/firestore_v1/types/document.py +++ b/google/cloud/firestore_v1/types/document.py @@ -13,18 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence +import proto # type: ignore + from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import latlng_pb2 # type: ignore -import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", @@ -33,8 +31,6 @@ "Value", "ArrayValue", "MapValue", - "Function", - "Pipeline", }, ) @@ -187,37 +183,6 @@ class Value(proto.Message): map_value (google.cloud.firestore_v1.types.MapValue): A map value. - This field is a member of `oneof`_ ``value_type``. - field_reference_value (str): - Value which references a field. - - This is considered relative (vs absolute) since it only - refers to a field and not a field within a particular - document. - - **Requires:** - - - Must follow [field reference][FieldReference.field_path] - limitations. - - - Not allowed to be used when writing documents. - - This field is a member of `oneof`_ ``value_type``. - function_value (google.cloud.firestore_v1.types.Function): - A value that represents an unevaluated expression. - - **Requires:** - - - Not allowed to be used when writing documents. - - This field is a member of `oneof`_ ``value_type``. - pipeline_value (google.cloud.firestore_v1.types.Pipeline): - A value that represents an unevaluated pipeline. - - **Requires:** - - - Not allowed to be used when writing documents. - This field is a member of `oneof`_ ``value_type``. 
""" @@ -281,23 +246,6 @@ class Value(proto.Message): oneof="value_type", message="MapValue", ) - field_reference_value: str = proto.Field( - proto.STRING, - number=19, - oneof="value_type", - ) - function_value: "Function" = proto.Field( - proto.MESSAGE, - number=20, - oneof="value_type", - message="Function", - ) - pipeline_value: "Pipeline" = proto.Field( - proto.MESSAGE, - number=21, - oneof="value_type", - message="Pipeline", - ) class ArrayValue(proto.Message): @@ -337,119 +285,4 @@ class MapValue(proto.Message): ) -class Function(proto.Message): - r"""Represents an unevaluated scalar expression. - - For example, the expression ``like(user_name, "%alice%")`` is - represented as: - - :: - - name: "like" - args { field_reference: "user_name" } - args { string_value: "%alice%" } - - Attributes: - name (str): - Required. The name of the function to evaluate. - - **Requires:** - - - must be in snake case (lower case with underscore - separator). - args (MutableSequence[google.cloud.firestore_v1.types.Value]): - Optional. Ordered list of arguments the given - function expects. - options (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - Optional. Optional named arguments that - certain functions may support. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence["Value"] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="Value", - ) - options: MutableMapping[str, "Value"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message="Value", - ) - - -class Pipeline(proto.Message): - r"""A Firestore query represented as an ordered list of - operations / stages. - - Attributes: - stages (MutableSequence[google.cloud.firestore_v1.types.Pipeline.Stage]): - Required. Ordered list of stages to evaluate. - """ - - class Stage(proto.Message): - r"""A single operation within a pipeline. - - A stage is made up of a unique name, and a list of arguments. The - exact number of arguments & types is dependent on the stage type. - - To give an example, the stage ``filter(state = "MD")`` would be - encoded as: - - :: - - name: "filter" - args { - function_value { - name: "eq" - args { field_reference_value: "state" } - args { string_value: "MD" } - } - } - - See public documentation for the full list. - - Attributes: - name (str): - Required. The name of the stage to evaluate. - - **Requires:** - - - must be in snake case (lower case with underscore - separator). - args (MutableSequence[google.cloud.firestore_v1.types.Value]): - Optional. Ordered list of arguments the given - stage expects. - options (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - Optional. Optional named arguments that - certain functions may support. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence["Value"] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="Value", - ) - options: MutableMapping[str, "Value"] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message="Value", - ) - - stages: MutableSequence[Stage] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Stage, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/explain_stats.py b/google/cloud/firestore_v1/types/explain_stats.py deleted file mode 100644 index 78198c329..000000000 --- a/google/cloud/firestore_v1/types/explain_stats.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import any_pb2 # type: ignore -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "ExplainStats", - }, -) - - -class ExplainStats(proto.Message): - r"""Pipeline explain stats. - - Depending on the explain options in the original request, this - can contain the optimized plan and / or execution stats. - - Attributes: - data (google.protobuf.any_pb2.Any): - The format depends on the ``output_format`` options in the - request. - - Currently there are two supported options: ``TEXT`` and - ``JSON``. Both supply a ``google.protobuf.StringValue``. - """ - - data: any_pb2.Any = proto.Field( - proto.MESSAGE, - number=1, - message=any_pb2.Any, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py index 5e1087446..190f55d28 100644 --- a/google/cloud/firestore_v1/types/firestore.py +++ b/google/cloud/firestore_v1/types/firestore.py @@ -13,25 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.firestore_v1.types import aggregation_result, common +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import explain_stats as gf_explain_stats -from google.cloud.firestore_v1.types import pipeline from google.cloud.firestore_v1.types import query as gf_query -from google.cloud.firestore_v1.types import query_profile, write +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", @@ -51,8 +48,6 @@ "RollbackRequest", "RunQueryRequest", "RunQueryResponse", - "ExecutePipelineRequest", - "ExecutePipelineResponse", "RunAggregationQueryRequest", "RunAggregationQueryResponse", "PartitionQueryRequest", @@ -840,151 +835,6 @@ class RunQueryResponse(proto.Message): ) -class ExecutePipelineRequest(proto.Message): - r"""The request for - [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - database (str): - Required. Database identifier, in the form - ``projects/{project}/databases/{database}``. - structured_pipeline (google.cloud.firestore_v1.types.StructuredPipeline): - A pipelined operation. - - This field is a member of `oneof`_ ``pipeline_type``. - transaction (bytes): - Run the query within an already active - transaction. - The value here is the opaque transaction ID to - execute the query in. - - This field is a member of `oneof`_ ``consistency_selector``. - new_transaction (google.cloud.firestore_v1.types.TransactionOptions): - Execute the pipeline in a new transaction. - - The identifier of the newly created transaction - will be returned in the first response on the - stream. This defaults to a read-only - transaction. - - This field is a member of `oneof`_ ``consistency_selector``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Execute the pipeline in a snapshot - transaction at the given time. - This must be a microsecond precision timestamp - within the past one hour, or if Point-in-Time - Recovery is enabled, can additionally be a whole - minute timestamp within the past 7 days. - - This field is a member of `oneof`_ ``consistency_selector``. 
- """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - structured_pipeline: pipeline.StructuredPipeline = proto.Field( - proto.MESSAGE, - number=2, - oneof="pipeline_type", - message=pipeline.StructuredPipeline, - ) - transaction: bytes = proto.Field( - proto.BYTES, - number=5, - oneof="consistency_selector", - ) - new_transaction: common.TransactionOptions = proto.Field( - proto.MESSAGE, - number=6, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - oneof="consistency_selector", - message=timestamp_pb2.Timestamp, - ) - - -class ExecutePipelineResponse(proto.Message): - r"""The response for [Firestore.Execute][]. - - Attributes: - transaction (bytes): - Newly created transaction identifier. - - This field is only specified as part of the first response - from the server, alongside the ``results`` field when the - original request specified - [ExecuteRequest.new_transaction][]. - results (MutableSequence[google.cloud.firestore_v1.types.Document]): - An ordered batch of results returned executing a pipeline. - - The batch size is variable, and can even be zero for when - only a partial progress message is returned. - - The fields present in the returned documents are only those - that were explicitly requested in the pipeline, this - includes those like - [``__name__``][google.firestore.v1.Document.name] and - [``__update_time__``][google.firestore.v1.Document.update_time]. - This is explicitly a divergence from ``Firestore.RunQuery`` - / ``Firestore.GetDocument`` RPCs which always return such - fields even when they are not specified in the - [``mask``][google.firestore.v1.DocumentMask]. - execution_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the results are valid. - - This is a (not strictly) monotonically increasing value - across multiple responses in the same stream. The API - guarantees that all previously returned results are still - valid at the latest ``execution_time``. This allows the API - consumer to treat the query if it ran at the latest - ``execution_time`` returned. - - If the query returns no results, a response with - ``execution_time`` and no ``results`` will be sent, and this - represents the time at which the operation was run. - explain_stats (google.cloud.firestore_v1.types.ExplainStats): - Query explain stats. - - This is present on the **last** response if the request - configured explain to run in 'analyze' or 'explain' mode in - the pipeline options. If the query does not return any - results, a response with ``explain_stats`` and no - ``results`` will still be sent. - """ - - transaction: bytes = proto.Field( - proto.BYTES, - number=1, - ) - results: MutableSequence[gf_document.Document] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gf_document.Document, - ) - execution_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - explain_stats: gf_explain_stats.ExplainStats = proto.Field( - proto.MESSAGE, - number=4, - message=gf_explain_stats.ExplainStats, - ) - - class RunAggregationQueryRequest(proto.Message): r"""The request for [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. @@ -1566,9 +1416,9 @@ class Target(proto.Message): Note that if the client sends multiple ``AddTarget`` requests without an ID, the order of IDs returned in - ``TargetChange.target_ids`` are undefined. 
Therefore, - clients should provide a target ID instead of relying on the - server to assign one. + ``TargetChage.target_ids`` are undefined. Therefore, clients + should provide a target ID instead of relying on the server + to assign one. If ``target_id`` is non-zero, there must not be an existing active target on this stream with the same ID. diff --git a/google/cloud/firestore_v1/types/pipeline.py b/google/cloud/firestore_v1/types/pipeline.py deleted file mode 100644 index 5bb915d2e..000000000 --- a/google/cloud/firestore_v1/types/pipeline.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.firestore_v1.types import document - -__protobuf__ = proto.module( - package="google.firestore.v1", - manifest={ - "StructuredPipeline", - }, -) - - -class StructuredPipeline(proto.Message): - r"""A Firestore query represented as an ordered list of operations / - stages. - - This is considered the top-level function which plans and executes a - query. It is logically equivalent to ``query(stages, options)``, but - prevents the client from having to build a function wrapper. - - Attributes: - pipeline (google.cloud.firestore_v1.types.Pipeline): - Required. The pipeline query to execute. - options (MutableMapping[str, google.cloud.firestore_v1.types.Value]): - Optional. Optional query-level arguments. - """ - - pipeline: document.Pipeline = proto.Field( - proto.MESSAGE, - number=1, - message=document.Pipeline, - ) - options: MutableMapping[str, document.Value] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=document.Value, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py index d0133be44..c2856d0b4 100644 --- a/google/cloud/firestore_v1/types/query.py +++ b/google/cloud/firestore_v1/types/query.py @@ -13,18 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import wrappers_pb2 # type: ignore import proto # type: ignore from google.cloud.firestore_v1.types import document +from google.protobuf import wrappers_pb2 # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", @@ -558,9 +555,9 @@ class FindNearest(proto.Message): when the vectors are more similar, the comparison is inverted. 
- - For EUCLIDEAN, COSINE: - ``WHERE distance <= distance_threshold`` - - For DOT_PRODUCT: ``WHERE distance >= distance_threshold`` + - For EUCLIDEAN, COSINE: WHERE distance <= + distance_threshold + - For DOT_PRODUCT: WHERE distance >= distance_threshold """ class DistanceMeasure(proto.Enum): diff --git a/google/cloud/firestore_v1/types/query_profile.py b/google/cloud/firestore_v1/types/query_profile.py index 32a0dfa41..f93184ae3 100644 --- a/google/cloud/firestore_v1/types/query_profile.py +++ b/google/cloud/firestore_v1/types/query_profile.py @@ -13,17 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence +import proto # type: ignore + from google.protobuf import duration_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore -import proto # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", diff --git a/google/cloud/firestore_v1/types/write.py b/google/cloud/firestore_v1/types/write.py index 2db5724c2..e393b9148 100644 --- a/google/cloud/firestore_v1/types/write.py +++ b/google/cloud/firestore_v1/types/write.py @@ -13,19 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - from __future__ import annotations from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.firestore_v1.types import bloom_filter, common +from google.cloud.firestore_v1.types import bloom_filter +from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document +from google.protobuf import timestamp_pb2 # type: ignore + __protobuf__ = proto.module( package="google.firestore.v1", diff --git a/noxfile.py b/noxfile.py index 4fb209cbc..3e8b80770 100644 --- a/noxfile.py +++ b/noxfile.py @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` + + # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import diff --git a/scripts/fixup_firestore_admin_v1_keywords.py b/scripts/fixup_firestore_admin_v1_keywords.py deleted file mode 100644 index 05bd87f0e..000000000 --- a/scripts/fixup_firestore_admin_v1_keywords.py +++ /dev/null @@ -1,213 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - - -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestore_adminCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'bulk_delete_documents': ('name', 'collection_ids', 'namespace_ids', ), - 'clone_database': ('parent', 'database_id', 'pitr_snapshot', 'encryption_config', 'tags', ), - 'create_backup_schedule': ('parent', 'backup_schedule', ), - 'create_database': ('parent', 'database', 'database_id', ), - 'create_index': ('parent', 'index', ), - 'create_user_creds': ('parent', 'user_creds', 'user_creds_id', ), - 'delete_backup': ('name', ), - 'delete_backup_schedule': ('name', ), - 'delete_database': ('name', 'etag', ), - 'delete_index': ('name', ), - 'delete_user_creds': ('name', ), - 'disable_user_creds': ('name', ), - 'enable_user_creds': ('name', ), - 'export_documents': ('name', 'collection_ids', 'output_uri_prefix', 'namespace_ids', 'snapshot_time', ), - 'get_backup': ('name', ), - 'get_backup_schedule': ('name', ), - 'get_database': ('name', ), - 'get_field': ('name', ), - 'get_index': ('name', ), - 'get_user_creds': ('name', ), - 'import_documents': ('name', 'collection_ids', 'input_uri_prefix', 'namespace_ids', ), - 'list_backups': ('parent', 'filter', ), - 'list_backup_schedules': ('parent', ), - 'list_databases': ('parent', 'show_deleted', ), - 'list_fields': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_indexes': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_user_creds': ('parent', ), - 'reset_user_password': ('name', ), - 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', 'tags', ), - 'update_backup_schedule': ('backup_schedule', 'update_mask', ), - 'update_database': ('database', 'update_mask', ), - 'update_field': ('field', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
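Two parts of the deleted transformer are easiest to see on concrete inputs. The ``partition`` helper returns ``(matching, non_matching)`` in stable order, and ``leave_Call`` folds flattened arguments into a single ``request`` dict; both illustrated here with made-up values:

.. code-block:: python

    evens, odds = partition(lambda n: n % 2 == 0, [1, 2, 3, 4])
    # evens == [2, 4], odds == [1, 3]

    # For 'get_backup' (params: ('name',)), the transform rewrites
    #
    #   client.get_backup("projects/p/locations/l/backups/b")
    #
    # into
    #
    #   client.get_backup(
    #       request={'name': "projects/p/locations/l/backups/b"})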
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestore_adminCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore_admin client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/scripts/fixup_firestore_v1_keywords.py b/scripts/fixup_firestore_v1_keywords.py deleted file mode 100644 index 6481e76bb..000000000 --- a/scripts/fixup_firestore_v1_keywords.py +++ /dev/null @@ -1,197 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -try: - import libcst as cst -except ImportError: - raise ImportError('Run `python -m pip install "libcst >= 0.2.5"` to install libcst.') - - - -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class firestoreCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_get_documents': ('database', 'documents', 'mask', 'transaction', 'new_transaction', 'read_time', ), - 'batch_write': ('database', 'writes', 'labels', ), - 'begin_transaction': ('database', 'options', ), - 'commit': ('database', 'writes', 'transaction', ), - 'create_document': ('parent', 'collection_id', 'document', 'document_id', 'mask', ), - 'delete_document': ('name', 'current_document', ), - 'get_document': ('name', 'mask', 'transaction', 'read_time', ), - 'list_collection_ids': ('parent', 'page_size', 'page_token', 'read_time', ), - 'list_documents': ('parent', 'collection_id', 'page_size', 'page_token', 'order_by', 'mask', 'transaction', 'read_time', 'show_missing', ), - 'listen': ('database', 'add_target', 'remove_target', 'labels', ), - 'partition_query': ('parent', 'structured_query', 'partition_count', 'page_token', 'page_size', 'read_time', ), - 'rollback': ('database', 'transaction', ), - 'run_aggregation_query': ('parent', 'structured_aggregation_query', 'transaction', 'new_transaction', 'read_time', 
'explain_options', ), - 'run_query': ('parent', 'structured_query', 'transaction', 'new_transaction', 'read_time', 'explain_options', ), - 'update_document': ('document', 'update_mask', 'mask', 'current_document', ), - 'write': ('database', 'stream_id', 'writes', 'stream_token', 'labels', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=firestoreCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the firestore client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index 8625abce9..a3407d9a9 100644 --- a/setup.py +++ b/setup.py @@ -12,6 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` + + import io import os diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index a352d758d..cbf94b283 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,7 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/unit/gapic/bundle/__init__.py b/tests/unit/gapic/bundle/__init__.py index a352d758d..cbf94b283 100644 --- a/tests/unit/gapic/bundle/__init__.py +++ b/tests/unit/gapic/bundle/__init__.py @@ -1,7 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/unit/gapic/firestore_admin_v1/__init__.py b/tests/unit/gapic/firestore_admin_v1/__init__.py index a352d758d..cbf94b283 100644 --- a/tests/unit/gapic/firestore_admin_v1/__init__.py +++ b/tests/unit/gapic/firestore_admin_v1/__init__.py @@ -1,7 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 3c045e7c6..7ef138d15 100644 --- a/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import os # try/except added for compatibility with python < 3.8 @@ -26,19 +22,20 @@ except ImportError: # pragma: NO COVER import mock -from collections.abc import AsyncIterable, Iterable +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format import json import math - +import pytest from google.api_core import api_core_version -from google.protobuf import json_format -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest from requests.sessions import Session +from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async @@ -47,37 +44,27 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import ( - future, - gapic_v1, - grpc_helpers, - grpc_helpers_async, - operation, - operations_v1, - path_template, -) from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template from google.api_core import retry as retries -import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore - from google.cloud.firestore_admin_v1.services.firestore_admin import ( FirestoreAdminAsyncClient, +) +from google.cloud.firestore_admin_v1.services.firestore_admin import ( FirestoreAdminClient, - pagers, - transports, ) +from google.cloud.firestore_admin_v1.services.firestore_admin import pagers +from google.cloud.firestore_admin_v1.services.firestore_admin import transports from google.cloud.firestore_admin_v1.types import backup from google.cloud.firestore_admin_v1.types import database from google.cloud.firestore_admin_v1.types import database as gfa_database @@ -87,9 +74,20 @@ from google.cloud.firestore_admin_v1.types import index from google.cloud.firestore_admin_v1.types import index as gfa_index from google.cloud.firestore_admin_v1.types import operation as gfa_operation -from google.cloud.firestore_admin_v1.types import schedule, snapshot +from google.cloud.firestore_admin_v1.types import schedule +from google.cloud.firestore_admin_v1.types import snapshot from google.cloud.firestore_admin_v1.types import user_creds from google.cloud.firestore_admin_v1.types import user_creds as gfa_user_creds +from google.cloud.location import locations_pb2 
+from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +import google.auth + CRED_INFO_JSON = { "credential_source": "/path/to/file", diff --git a/tests/unit/gapic/firestore_v1/__init__.py b/tests/unit/gapic/firestore_v1/__init__.py index a352d758d..cbf94b283 100644 --- a/tests/unit/gapic/firestore_v1/__init__.py +++ b/tests/unit/gapic/firestore_v1/__init__.py @@ -1,7 +1,3 @@ -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index cffed614c..c5994da87 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` -# The source of truth for this file is `.librarian/generator-input` - import os # try/except added for compatibility with python < 3.8 @@ -26,19 +22,20 @@ except ImportError: # pragma: NO COVER import mock -from collections.abc import AsyncIterable, Iterable +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format import json import math - +import pytest from google.api_core import api_core_version -from google.protobuf import json_format -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest from requests.sessions import Session +from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async @@ -47,13 +44,27 @@ except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.api_core import retry as retries -import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.firestore_v1.services.firestore import FirestoreAsyncClient +from google.cloud.firestore_v1.services.firestore import FirestoreClient +from google.cloud.firestore_v1.services.firestore import pagers +from google.cloud.firestore_v1.services.firestore import transports +from google.cloud.firestore_v1.types import aggregation_result +from google.cloud.firestore_v1.types import common +from google.cloud.firestore_v1.types import document +from google.cloud.firestore_v1.types import document as gf_document +from 
google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import query +from google.cloud.firestore_v1.types import query_profile +from google.cloud.firestore_v1.types import write as gf_write from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account @@ -62,24 +73,8 @@ from google.protobuf import wrappers_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore from google.type import latlng_pb2 # type: ignore +import google.auth -from google.cloud.firestore_v1.services.firestore import ( - FirestoreAsyncClient, - FirestoreClient, - pagers, - transports, -) -from google.cloud.firestore_v1.types import ( - explain_stats, - firestore, - pipeline, - query, - query_profile, -) -from google.cloud.firestore_v1.types import aggregation_result, common -from google.cloud.firestore_v1.types import document -from google.cloud.firestore_v1.types import document as gf_document -from google.cloud.firestore_v1.types import write as gf_write CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -4085,185 +4080,6 @@ async def test_run_query_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - firestore.ExecutePipelineRequest, - dict, - ], -) -def test_execute_pipeline(request_type, transport: str = "grpc"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.ExecutePipelineResponse()]) - response = client.execute_pipeline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = firestore.ExecutePipelineRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.ExecutePipelineResponse) - - -def test_execute_pipeline_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = firestore.ExecutePipelineRequest( - database="database_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.execute_pipeline(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == firestore.ExecutePipelineRequest( - database="database_value", - ) - - -def test_execute_pipeline_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_pipeline in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.execute_pipeline - ] = mock_rpc - request = {} - client.execute_pipeline(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.execute_pipeline(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_execute_pipeline_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.execute_pipeline - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.execute_pipeline - ] = mock_rpc - - request = {} - await client.execute_pipeline(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.execute_pipeline(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_execute_pipeline_async( - transport: str = "grpc_asyncio", request_type=firestore.ExecutePipelineRequest -): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ExecutePipelineResponse()] - ) - response = await client.execute_pipeline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = firestore.ExecutePipelineRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.ExecutePipelineResponse) - - -@pytest.mark.asyncio -async def test_execute_pipeline_async_from_dict(): - await test_execute_pipeline_async(request_type=dict) - - @pytest.mark.parametrize( "request_type", [ @@ -7790,7 +7606,7 @@ def test_run_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -def test_execute_pipeline_rest_use_cached_wrapped_rpc(): +def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7804,7 +7620,10 @@ def test_execute_pipeline_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.execute_pipeline in client._transport._wrapped_methods + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7812,29 +7631,29 @@ def test_execute_pipeline_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.execute_pipeline + client._transport.run_aggregation_query ] = mock_rpc request = {} - client.execute_pipeline(request) + client.run_aggregation_query(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.execute_pipeline(request) + client.run_aggregation_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_execute_pipeline_rest_required_fields( - request_type=firestore.ExecutePipelineRequest, +def test_run_aggregation_query_rest_required_fields( + request_type=firestore.RunAggregationQueryRequest, ): transport_class = transports.FirestoreRestTransport request_init = {} - request_init["database"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7845,21 +7664,21 @@ def test_execute_pipeline_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).execute_pipeline._get_unset_required_fields(jsonified_request) + ).run_aggregation_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["database"] = "database_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).execute_pipeline._get_unset_required_fields(jsonified_request) + ).run_aggregation_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == "database_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7868,7 +7687,7 @@ def test_execute_pipeline_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore.ExecutePipelineResponse() + return_value = firestore.RunAggregationQueryResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7890,7 +7709,7 @@ def test_execute_pipeline_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = firestore.ExecutePipelineResponse.pb(return_value) + return_value = firestore.RunAggregationQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -7900,23 +7719,23 @@ def test_execute_pipeline_rest_required_fields( with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.execute_pipeline(request) + response = client.run_aggregation_query(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_execute_pipeline_rest_unset_required_fields(): +def test_run_aggregation_query_rest_unset_required_fields(): transport = transports.FirestoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.execute_pipeline._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database",))) + unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): +def test_partition_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7930,35 +7749,30 @@ def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.run_aggregation_query - in client._transport._wrapped_methods - ) + assert client._transport.partition_query in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.run_aggregation_query - ] = mock_rpc + client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc request = {} - client.run_aggregation_query(request) + client.partition_query(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_aggregation_query(request) + client.partition_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_run_aggregation_query_rest_required_fields( - request_type=firestore.RunAggregationQueryRequest, +def test_partition_query_rest_required_fields( + request_type=firestore.PartitionQueryRequest, ): transport_class = transports.FirestoreRestTransport @@ -7974,7 +7788,7 @@ def test_run_aggregation_query_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) + ).partition_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7983,7 +7797,7 @@ def test_run_aggregation_query_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) + ).partition_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7997,7 +7811,7 @@ def test_run_aggregation_query_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse() + return_value = firestore.PartitionQueryResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8019,158 +7833,34 @@ def test_run_aggregation_query_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) + return_value = firestore.PartitionQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - with mock.patch.object(response_value, "iter_content") as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.run_aggregation_query(request) + response = client.partition_query(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_run_aggregation_query_rest_unset_required_fields(): +def test_partition_query_rest_unset_required_fields(): transport = transports.FirestoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) + unset_fields = transport.partition_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent",))) -def test_partition_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert 
wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_partition_query_rest_required_fields( - request_type=firestore.PartitionQueryRequest, -): - transport_class = transports.FirestoreRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = firestore.PartitionQueryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.PartitionQueryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.partition_query(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_partition_query_rest_unset_required_fields(): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.partition_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_partition_query_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_partition_query_rest_pager(transport: str = "rest"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -9059,27 +8749,6 @@ def test_run_query_empty_call_grpc(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_pipeline_empty_call_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - call.return_value = iter([firestore.ExecutePipelineResponse()]) - client.execute_pipeline(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_run_aggregation_query_empty_call_grpc(): @@ -9189,60 +8858,6 @@ def test_create_document_empty_call_grpc(): assert args[0] == request_msg -def test_execute_pipeline_routing_parameters_request_1_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - call.return_value = iter([firestore.ExecutePipelineResponse()]) - client.execute_pipeline(request={"database": "projects/sample1/sample2"}) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/sample2"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_execute_pipeline_routing_parameters_request_2_grpc(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - call.return_value = iter([firestore.ExecutePipelineResponse()]) - client.execute_pipeline( - request={"database": "projects/sample1/databases/sample2/sample3"} - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/databases/sample2/sample3"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - def test_transport_kind_grpc_asyncio(): transport = FirestoreAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -9492,32 +9107,6 @@ async def test_run_query_empty_call_grpc_asyncio(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_execute_pipeline_empty_call_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ExecutePipelineResponse()] - ) - await client.execute_pipeline(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -9655,70 +9244,6 @@ async def test_create_document_empty_call_grpc_asyncio(): assert args[0] == request_msg -@pytest.mark.asyncio -async def test_execute_pipeline_routing_parameters_request_1_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ExecutePipelineResponse()] - ) - await client.execute_pipeline(request={"database": "projects/sample1/sample2"}) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/sample2"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -@pytest.mark.asyncio -async def test_execute_pipeline_routing_parameters_request_2_grpc_asyncio(): - client = FirestoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ExecutePipelineResponse()] - ) - await client.execute_pipeline( - request={"database": "projects/sample1/databases/sample2/sample3"} - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/databases/sample2/sample3"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - def test_transport_kind_rest(): transport = FirestoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -10904,137 +10429,6 @@ def test_run_query_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_execute_pipeline_rest_bad_request( - request_type=firestore.ExecutePipelineRequest, -): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.execute_pipeline(request) - - -@pytest.mark.parametrize( - "request_type", - [ - firestore.ExecutePipelineRequest, - dict, - ], -) -def test_execute_pipeline_rest_call_success(request_type): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"database": "projects/sample1/databases/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = firestore.ExecutePipelineResponse( - transaction=b"transaction_blob", - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = firestore.ExecutePipelineResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.execute_pipeline(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.ExecutePipelineResponse) - assert response.transaction == b"transaction_blob" - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_execute_pipeline_rest_interceptors(null_interceptor): - transport = transports.FirestoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), - ) - client = FirestoreClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.FirestoreRestInterceptor, "post_execute_pipeline" - ) as post, mock.patch.object( - transports.FirestoreRestInterceptor, "post_execute_pipeline_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.FirestoreRestInterceptor, "pre_execute_pipeline" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = firestore.ExecutePipelineRequest.pb( - firestore.ExecutePipelineRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = firestore.ExecutePipelineResponse.to_json( - firestore.ExecutePipelineResponse() - ) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = firestore.ExecutePipelineRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = firestore.ExecutePipelineResponse() - post_with_metadata.return_value = firestore.ExecutePipelineResponse(), metadata - - client.execute_pipeline( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - def test_run_aggregation_query_rest_bad_request( request_type=firestore.RunAggregationQueryRequest, ): @@ -12211,26 +11605,6 @@ def test_run_query_empty_call_rest(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_pipeline_empty_call_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - client.execute_pipeline(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_run_aggregation_query_empty_call_rest(): @@ -12335,58 +11709,6 @@ def test_create_document_empty_call_rest(): assert args[0] == request_msg -def test_execute_pipeline_routing_parameters_request_1_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - client.execute_pipeline(request={"database": "projects/sample1/sample2"}) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/sample2"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - -def test_execute_pipeline_routing_parameters_request_2_rest(): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: - client.execute_pipeline( - request={"database": "projects/sample1/databases/sample2/sample3"} - ) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, kw = call.mock_calls[0] - request_msg = firestore.ExecutePipelineRequest( - **{"database": "projects/sample1/databases/sample2/sample3"} - ) - - assert args[0] == request_msg - - expected_headers = {"project_id": "sample1", "database_id": "sample2"} - assert ( - gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] - ) - - def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = FirestoreClient( @@ -12429,7 +11751,6 @@ def test_firestore_base_transport(): "commit", "rollback", "run_query", - "execute_pipeline", "run_aggregation_query", "partition_query", "write", @@ -12735,9 +12056,6 @@ def test_firestore_client_transport_session_collision(transport_name): session1 = client1.transport.run_query._session session2 = client2.transport.run_query._session assert session1 != session2 - session1 = client1.transport.execute_pipeline._session - session2 = client2.transport.execute_pipeline._session - assert session1 != session2 session1 = client1.transport.run_aggregation_query._session session2 = client2.transport.run_aggregation_query._session assert session1 != session2 From 368d1688022c85dbc64f74654849b8d5b1d562c6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 19:33:57 +0000 Subject: [PATCH 3/4] remove scripts from setup.py which is handwritten --- .librarian/generator-input/setup.py | 4 ---- setup.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/.librarian/generator-input/setup.py b/.librarian/generator-input/setup.py index 8625abce9..28d6faf51 100644 --- a/.librarian/generator-input/setup.py +++ b/.librarian/generator-input/setup.py @@ -90,10 +90,6 @@ install_requires=dependencies, extras_require=extras, python_requires=">=3.7", - scripts=[ - "scripts/fixup_firestore_v1_keywords.py", - "scripts/fixup_firestore_admin_v1_keywords.py", - ], include_package_data=True, zip_safe=False, ) diff --git a/setup.py b/setup.py index a3407d9a9..72a6f53bd 100644 --- a/setup.py +++ b/setup.py @@ -94,10 +94,6 @@ install_requires=dependencies, extras_require=extras, python_requires=">=3.7", - scripts=[ - "scripts/fixup_firestore_v1_keywords.py", - "scripts/fixup_firestore_admin_v1_keywords.py", - ], include_package_data=True, zip_safe=False, ) From f3498c78268df94bfde483059327471c49c3ac01 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 20:23:14 +0000 Subject: [PATCH 4/4] restore changes from 1144 --- .librarian/state.yaml | 2 +- google/cloud/firestore_v1/gapic_metadata.json | 15 + .../services/firestore/async_client.py | 104 +++ .../firestore_v1/services/firestore/client.py | 102 +++ .../services/firestore/transports/base.py | 29 + .../services/firestore/transports/grpc.py | 28 + .../firestore/transports/grpc_asyncio.py | 45 + .../services/firestore/transports/rest.py | 220 +++++ .../firestore/transports/rest_base.py | 57 ++ google/cloud/firestore_v1/types/__init__.py | 16 + google/cloud/firestore_v1/types/document.py | 165 ++++ .../cloud/firestore_v1/types/explain_stats.py | 55 ++ google/cloud/firestore_v1/types/firestore.py | 155 +++- google/cloud/firestore_v1/types/pipeline.py | 61 ++ google/cloud/firestore_v1/types/query.py | 6 +- .../unit/gapic/firestore_v1/test_firestore.py | 827 ++++++++++++++++-- 16 files changed, 1806 insertions(+), 81 deletions(-) create mode 100644 google/cloud/firestore_v1/types/explain_stats.py create mode 100644 google/cloud/firestore_v1/types/pipeline.py diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 06f3fc76e..e5b84f116 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -2,7 +2,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-li libraries: - id: google-cloud-firestore version: 2.21.0 - last_generated_commit: 659ea6e98acc7d58661ce2aa7b4cf76a7ef3fd42 + last_generated_commit: 1a9d00bed77e6db82ff67764ffe14e3b5209f5cd apis: - path: google/firestore/v1 service_config: firestore_v1.yaml diff --git 
a/google/cloud/firestore_v1/gapic_metadata.json b/google/cloud/firestore_v1/gapic_metadata.json index d0462f964..03a6e428b 100644 --- a/google/cloud/firestore_v1/gapic_metadata.json +++ b/google/cloud/firestore_v1/gapic_metadata.json @@ -40,6 +40,11 @@ "delete_document" ] }, + "ExecutePipeline": { + "methods": [ + "execute_pipeline" + ] + }, "GetDocument": { "methods": [ "get_document" @@ -125,6 +130,11 @@ "delete_document" ] }, + "ExecutePipeline": { + "methods": [ + "execute_pipeline" + ] + }, "GetDocument": { "methods": [ "get_document" @@ -210,6 +220,11 @@ "delete_document" ] }, + "ExecutePipeline": { + "methods": [ + "execute_pipeline" + ] + }, "GetDocument": { "methods": [ "get_document" diff --git a/google/cloud/firestore_v1/services/firestore/async_client.py b/google/cloud/firestore_v1/services/firestore/async_client.py index b904229b0..3557eb94c 100644 --- a/google/cloud/firestore_v1/services/firestore/async_client.py +++ b/google/cloud/firestore_v1/services/firestore/async_client.py @@ -53,6 +53,7 @@ from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import explain_stats from google.cloud.firestore_v1.types import firestore from google.cloud.firestore_v1.types import query from google.cloud.firestore_v1.types import query_profile @@ -1248,6 +1249,109 @@ async def sample_run_query(): # Done; return the response. return response + def execute_pipeline( + self, + request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[firestore.ExecutePipelineResponse]]: + r"""Executes a pipeline query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import firestore_v1 + + async def sample_execute_pipeline(): + # Create a client + client = firestore_v1.FirestoreAsyncClient() + + # Initialize request argument(s) + structured_pipeline = firestore_v1.StructuredPipeline() + structured_pipeline.pipeline.stages.name = "name_value" + + request = firestore_v1.ExecutePipelineRequest( + structured_pipeline=structured_pipeline, + transaction=b'transaction_blob', + database="database_value", + ) + + # Make the request + stream = await client.execute_pipeline(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]]): + The request object. The request for + [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
+
+        Returns:
+            AsyncIterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
+                The response for [Firestore.Execute][].
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, firestore.ExecutePipelineRequest):
+            request = firestore.ExecutePipelineRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.execute_pipeline
+        ]
+
+        header_params = {}
+
+        routing_param_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
+        regex_match = routing_param_regex.match(request.database)
+        if regex_match and regex_match.group("project_id"):
+            header_params["project_id"] = regex_match.group("project_id")
+
+        routing_param_regex = re.compile(
+            "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
+        )
+        regex_match = routing_param_regex.match(request.database)
+        if regex_match and regex_match.group("database_id"):
+            header_params["database_id"] = regex_match.group("database_id")
+
+        if header_params:
+            metadata = tuple(metadata) + (
+                gapic_v1.routing_header.to_grpc_metadata(header_params),
+            )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
     def run_aggregation_query(
         self,
         request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None,
diff --git a/google/cloud/firestore_v1/services/firestore/client.py b/google/cloud/firestore_v1/services/firestore/client.py
index d6540c4ff..ac86aaa9e 100644
--- a/google/cloud/firestore_v1/services/firestore/client.py
+++ b/google/cloud/firestore_v1/services/firestore/client.py
@@ -68,6 +68,7 @@
 from google.cloud.firestore_v1.types import common
 from google.cloud.firestore_v1.types import document
 from google.cloud.firestore_v1.types import document as gf_document
+from google.cloud.firestore_v1.types import explain_stats
 from google.cloud.firestore_v1.types import firestore
 from google.cloud.firestore_v1.types import query
 from google.cloud.firestore_v1.types import query_profile
@@ -1649,6 +1650,107 @@ def sample_run_query():
         # Done; return the response.
         return response
 
+    def execute_pipeline(
+        self,
+        request: Optional[Union[firestore.ExecutePipelineRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> Iterable[firestore.ExecutePipelineResponse]:
+        r"""Executes a pipeline query.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import firestore_v1
+
+            def sample_execute_pipeline():
+                # Create a client
+                client = firestore_v1.FirestoreClient()
+
+                # Initialize request argument(s)
+                structured_pipeline = firestore_v1.StructuredPipeline()
+                structured_pipeline.pipeline.stages = [{"name": "name_value"}]
+
+                request = firestore_v1.ExecutePipelineRequest(
+                    structured_pipeline=structured_pipeline,
+                    transaction=b'transaction_blob',
+                    database="database_value",
+                )
+
+                # Make the request
+                stream = client.execute_pipeline(request=request)
+
+                # Handle the response
+                for response in stream:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.firestore_v1.types.ExecutePipelineRequest, dict]):
+                The request object. The request for
+                [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline].
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            Iterable[google.cloud.firestore_v1.types.ExecutePipelineResponse]:
+                The response for [Firestore.Execute][].
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, firestore.ExecutePipelineRequest):
+            request = firestore.ExecutePipelineRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.execute_pipeline]
+
+        header_params = {}
+
+        routing_param_regex = re.compile("^projects/(?P<project_id>[^/]+)(?:/.*)?$")
+        regex_match = routing_param_regex.match(request.database)
+        if regex_match and regex_match.group("project_id"):
+            header_params["project_id"] = regex_match.group("project_id")
+
+        routing_param_regex = re.compile(
+            "^projects/[^/]+/databases/(?P<database_id>[^/]+)(?:/.*)?$"
+        )
+        regex_match = routing_param_regex.match(request.database)
+        if regex_match and regex_match.group("database_id"):
+            header_params["database_id"] = regex_match.group("database_id")
+
+        if header_params:
+            metadata = tuple(metadata) + (
+                gapic_v1.routing_header.to_grpc_metadata(header_params),
+            )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + def run_aggregation_query( self, request: Optional[Union[firestore.RunAggregationQueryRequest, dict]] = None, diff --git a/google/cloud/firestore_v1/services/firestore/transports/base.py b/google/cloud/firestore_v1/services/firestore/transports/base.py index 02d6c0bbc..905dded09 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/base.py @@ -291,6 +291,23 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), + self.execute_pipeline: gapic_v1.method.wrap_method( + self.execute_pipeline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), self.run_aggregation_query: gapic_v1.method.wrap_method( self.run_aggregation_query, default_retry=retries.Retry( @@ -514,6 +531,18 @@ def run_query( ]: raise NotImplementedError() + @property + def execute_pipeline( + self, + ) -> Callable[ + [firestore.ExecutePipelineRequest], + Union[ + firestore.ExecutePipelineResponse, + Awaitable[firestore.ExecutePipelineResponse], + ], + ]: + raise NotImplementedError() + @property def run_aggregation_query( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1/services/firestore/transports/grpc.py index 3c5bded2d..f057d16e3 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc.py @@ -573,6 +573,34 @@ def run_query( ) return self._stubs["run_query"] + @property + def execute_pipeline( + self, + ) -> Callable[ + [firestore.ExecutePipelineRequest], firestore.ExecutePipelineResponse + ]: + r"""Return a callable for the execute pipeline method over gRPC. + + Executes a pipeline query. + + Returns: + Callable[[~.ExecutePipelineRequest], + ~.ExecutePipelineResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_pipeline" not in self._stubs: + self._stubs["execute_pipeline"] = self._logged_channel.unary_stream( + "/google.firestore.v1.Firestore/ExecutePipeline", + request_serializer=firestore.ExecutePipelineRequest.serialize, + response_deserializer=firestore.ExecutePipelineResponse.deserialize, + ) + return self._stubs["execute_pipeline"] + @property def run_aggregation_query( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py index 6cc93e21a..cf6006672 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py +++ b/google/cloud/firestore_v1/services/firestore/transports/grpc_asyncio.py @@ -589,6 +589,34 @@ def run_query( ) return self._stubs["run_query"] + @property + def execute_pipeline( + self, + ) -> Callable[ + [firestore.ExecutePipelineRequest], Awaitable[firestore.ExecutePipelineResponse] + ]: + r"""Return a callable for the execute pipeline method over gRPC. + + Executes a pipeline query. 
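+
+        For illustration, a minimal consumption sketch (``transport``
+        stands in for an initialized ``FirestoreGrpcAsyncIOTransport``;
+        most code should go through
+        ``FirestoreAsyncClient.execute_pipeline`` instead, and the
+        database value below is illustrative)::
+
+            call = transport.execute_pipeline(
+                firestore.ExecutePipelineRequest(
+                    database="projects/my-project/databases/(default)",
+                )
+            )
+            async for response in call:
+                print(response.results)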
+ + Returns: + Callable[[~.ExecutePipelineRequest], + Awaitable[~.ExecutePipelineResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_pipeline" not in self._stubs: + self._stubs["execute_pipeline"] = self._logged_channel.unary_stream( + "/google.firestore.v1.Firestore/ExecutePipeline", + request_serializer=firestore.ExecutePipelineRequest.serialize, + response_deserializer=firestore.ExecutePipelineResponse.deserialize, + ) + return self._stubs["execute_pipeline"] + @property def run_aggregation_query( self, @@ -964,6 +992,23 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), + self.execute_pipeline: self._wrap_method( + self.execute_pipeline, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), self.run_aggregation_query: self._wrap_method( self.run_aggregation_query, default_retry=retries.AsyncRetry( diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest.py b/google/cloud/firestore_v1/services/firestore/transports/rest.py index a32a7e84e..845569d97 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest.py @@ -127,6 +127,14 @@ def pre_delete_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_execute_pipeline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_pipeline(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -445,6 +453,56 @@ def pre_delete_document( """ return request, metadata + def pre_execute_pipeline( + self, + request: firestore.ExecutePipelineRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + firestore.ExecutePipelineRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for execute_pipeline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Firestore server. + """ + return request, metadata + + def post_execute_pipeline( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for execute_pipeline + + DEPRECATED. Please use the `post_execute_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Firestore server but before + it is returned to user code. This `post_execute_pipeline` interceptor runs + before the `post_execute_pipeline_with_metadata` interceptor. 
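+
+        A minimal override sketch (the subclass name and log message are
+        illustrative, not part of the generated surface)::
+
+            class LoggingFirestoreInterceptor(FirestoreRestInterceptor):
+                def post_execute_pipeline(self, response):
+                    logging.debug("execute_pipeline stream opened")
+                    return response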
+ """ + return response + + def post_execute_pipeline_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for execute_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firestore server but before it is returned to user code. + + We recommend only using this `post_execute_pipeline_with_metadata` + interceptor in new development instead of the `post_execute_pipeline` interceptor. + When both interceptors are used, this `post_execute_pipeline_with_metadata` interceptor runs after the + `post_execute_pipeline` interceptor. The (possibly modified) response returned by + `post_execute_pipeline` will be passed to + `post_execute_pipeline_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: firestore.GetDocumentRequest, @@ -1873,6 +1931,158 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExecutePipeline( + _BaseFirestoreRestTransport._BaseExecutePipeline, FirestoreRestStub + ): + def __hash__(self): + return hash("FirestoreRestTransport.ExecutePipeline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__( + self, + request: firestore.ExecutePipelineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the execute pipeline method over HTTP. + + Args: + request (~.firestore.ExecutePipelineRequest): + The request object. The request for + [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.firestore.ExecutePipelineResponse: + The response for [Firestore.Execute][]. 
+ """ + + http_options = ( + _BaseFirestoreRestTransport._BaseExecutePipeline._get_http_options() + ) + + request, metadata = self._interceptor.pre_execute_pipeline( + request, metadata + ) + transcoded_request = _BaseFirestoreRestTransport._BaseExecutePipeline._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseFirestoreRestTransport._BaseExecutePipeline._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseFirestoreRestTransport._BaseExecutePipeline._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.firestore_v1.FirestoreClient.ExecutePipeline", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ExecutePipeline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FirestoreRestTransport._ExecutePipeline._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, firestore.ExecutePipelineResponse + ) + + resp = self._interceptor.post_execute_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_pipeline_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.firestore_v1.FirestoreClient.execute_pipeline", + extra={ + "serviceName": "google.firestore.v1.Firestore", + "rpcName": "ExecutePipeline", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetDocument(_BaseFirestoreRestTransport._BaseGetDocument, FirestoreRestStub): def __hash__(self): return hash("FirestoreRestTransport.GetDocument") @@ -3143,6 +3353,16 @@ def delete_document( # In C++ this would require a dynamic_cast return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_pipeline( + self, + ) -> Callable[ + [firestore.ExecutePipelineRequest], firestore.ExecutePipelineResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExecutePipeline(self._session, self._host, self._interceptor) # type: ignore + @property def get_document( self, diff --git a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py index 1d95cd16e..80ce35e49 100644 --- a/google/cloud/firestore_v1/services/firestore/transports/rest_base.py +++ b/google/cloud/firestore_v1/services/firestore/transports/rest_base.py @@ -426,6 +426,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseExecutePipeline: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{database=projects/*/databases/*}/documents:executePipeline", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = firestore.ExecutePipelineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFirestoreRestTransport._BaseExecutePipeline._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetDocument: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/google/cloud/firestore_v1/types/__init__.py b/google/cloud/firestore_v1/types/__init__.py index ae1004e13..ed1965d7f 100644 --- a/google/cloud/firestore_v1/types/__init__.py +++ b/google/cloud/firestore_v1/types/__init__.py @@ -28,9 +28,14 @@ from .document import ( ArrayValue, Document, + Function, MapValue, + Pipeline, Value, ) +from .explain_stats import ( + ExplainStats, +) from .firestore import ( BatchGetDocumentsRequest, BatchGetDocumentsResponse, @@ -42,6 +47,8 @@ CommitResponse, CreateDocumentRequest, DeleteDocumentRequest, + ExecutePipelineRequest, + ExecutePipelineResponse, GetDocumentRequest, ListCollectionIdsRequest, ListCollectionIdsResponse, @@ -62,6 +69,9 @@ WriteRequest, WriteResponse, ) +from .pipeline import ( + StructuredPipeline, +) from .query import ( Cursor, StructuredAggregationQuery, @@ -92,8 +102,11 @@ "TransactionOptions", "ArrayValue", "Document", + "Function", "MapValue", + "Pipeline", "Value", + "ExplainStats", "BatchGetDocumentsRequest", "BatchGetDocumentsResponse", "BatchWriteRequest", @@ -104,6 +117,8 @@ "CommitResponse", "CreateDocumentRequest", "DeleteDocumentRequest", + "ExecutePipelineRequest", + "ExecutePipelineResponse", "GetDocumentRequest", "ListCollectionIdsRequest", "ListCollectionIdsResponse", @@ -123,6 +138,7 @@ "UpdateDocumentRequest", "WriteRequest", 
"WriteResponse", + "StructuredPipeline", "Cursor", "StructuredAggregationQuery", "StructuredQuery", diff --git a/google/cloud/firestore_v1/types/document.py b/google/cloud/firestore_v1/types/document.py index 22fe79b73..8073ad97a 100644 --- a/google/cloud/firestore_v1/types/document.py +++ b/google/cloud/firestore_v1/types/document.py @@ -31,6 +31,8 @@ "Value", "ArrayValue", "MapValue", + "Function", + "Pipeline", }, ) @@ -183,6 +185,37 @@ class Value(proto.Message): map_value (google.cloud.firestore_v1.types.MapValue): A map value. + This field is a member of `oneof`_ ``value_type``. + field_reference_value (str): + Value which references a field. + + This is considered relative (vs absolute) since it only + refers to a field and not a field within a particular + document. + + **Requires:** + + - Must follow [field reference][FieldReference.field_path] + limitations. + + - Not allowed to be used when writing documents. + + This field is a member of `oneof`_ ``value_type``. + function_value (google.cloud.firestore_v1.types.Function): + A value that represents an unevaluated expression. + + **Requires:** + + - Not allowed to be used when writing documents. + + This field is a member of `oneof`_ ``value_type``. + pipeline_value (google.cloud.firestore_v1.types.Pipeline): + A value that represents an unevaluated pipeline. + + **Requires:** + + - Not allowed to be used when writing documents. + This field is a member of `oneof`_ ``value_type``. """ @@ -246,6 +279,23 @@ class Value(proto.Message): oneof="value_type", message="MapValue", ) + field_reference_value: str = proto.Field( + proto.STRING, + number=19, + oneof="value_type", + ) + function_value: "Function" = proto.Field( + proto.MESSAGE, + number=20, + oneof="value_type", + message="Function", + ) + pipeline_value: "Pipeline" = proto.Field( + proto.MESSAGE, + number=21, + oneof="value_type", + message="Pipeline", + ) class ArrayValue(proto.Message): @@ -285,4 +335,119 @@ class MapValue(proto.Message): ) +class Function(proto.Message): + r"""Represents an unevaluated scalar expression. + + For example, the expression ``like(user_name, "%alice%")`` is + represented as: + + :: + + name: "like" + args { field_reference: "user_name" } + args { string_value: "%alice%" } + + Attributes: + name (str): + Required. The name of the function to evaluate. + + **Requires:** + + - must be in snake case (lower case with underscore + separator). + args (MutableSequence[google.cloud.firestore_v1.types.Value]): + Optional. Ordered list of arguments the given + function expects. + options (MutableMapping[str, google.cloud.firestore_v1.types.Value]): + Optional. Optional named arguments that + certain functions may support. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Value", + ) + options: MutableMapping[str, "Value"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="Value", + ) + + +class Pipeline(proto.Message): + r"""A Firestore query represented as an ordered list of + operations / stages. + + Attributes: + stages (MutableSequence[google.cloud.firestore_v1.types.Pipeline.Stage]): + Required. Ordered list of stages to evaluate. + """ + + class Stage(proto.Message): + r"""A single operation within a pipeline. + + A stage is made up of a unique name, and a list of arguments. The + exact number of arguments & types is dependent on the stage type. 
+ + To give an example, the stage ``filter(state = "MD")`` would be + encoded as: + + :: + + name: "filter" + args { + function_value { + name: "eq" + args { field_reference_value: "state" } + args { string_value: "MD" } + } + } + + See public documentation for the full list. + + Attributes: + name (str): + Required. The name of the stage to evaluate. + + **Requires:** + + - must be in snake case (lower case with underscore + separator). + args (MutableSequence[google.cloud.firestore_v1.types.Value]): + Optional. Ordered list of arguments the given + stage expects. + options (MutableMapping[str, google.cloud.firestore_v1.types.Value]): + Optional. Optional named arguments that + certain functions may support. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Value", + ) + options: MutableMapping[str, "Value"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="Value", + ) + + stages: MutableSequence[Stage] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Stage, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/explain_stats.py b/google/cloud/firestore_v1/types/explain_stats.py new file mode 100644 index 000000000..b0f9421ba --- /dev/null +++ b/google/cloud/firestore_v1/types/explain_stats.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import any_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.firestore.v1", + manifest={ + "ExplainStats", + }, +) + + +class ExplainStats(proto.Message): + r"""Pipeline explain stats. + + Depending on the explain options in the original request, this + can contain the optimized plan and / or execution stats. + + Attributes: + data (google.protobuf.any_pb2.Any): + The format depends on the ``output_format`` options in the + request. + + Currently there are two supported options: ``TEXT`` and + ``JSON``. Both supply a ``google.protobuf.StringValue``. 
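+
+    A minimal decoding sketch (``response`` stands in for an
+    ``ExecutePipelineResponse`` carrying these stats)::
+
+        from google.protobuf import wrappers_pb2
+
+        text = wrappers_pb2.StringValue()
+        if response.explain_stats.data.Unpack(text):
+            print(text.value)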
+ """ + + data: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=1, + message=any_pb2.Any, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1/types/firestore.py b/google/cloud/firestore_v1/types/firestore.py index 190f55d28..4e53ba313 100644 --- a/google/cloud/firestore_v1/types/firestore.py +++ b/google/cloud/firestore_v1/types/firestore.py @@ -22,6 +22,8 @@ from google.cloud.firestore_v1.types import aggregation_result from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import explain_stats as gf_explain_stats +from google.cloud.firestore_v1.types import pipeline from google.cloud.firestore_v1.types import query as gf_query from google.cloud.firestore_v1.types import query_profile from google.cloud.firestore_v1.types import write @@ -48,6 +50,8 @@ "RollbackRequest", "RunQueryRequest", "RunQueryResponse", + "ExecutePipelineRequest", + "ExecutePipelineResponse", "RunAggregationQueryRequest", "RunAggregationQueryResponse", "PartitionQueryRequest", @@ -835,6 +839,151 @@ class RunQueryResponse(proto.Message): ) +class ExecutePipelineRequest(proto.Message): + r"""The request for + [Firestore.ExecutePipeline][google.firestore.v1.Firestore.ExecutePipeline]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + database (str): + Required. Database identifier, in the form + ``projects/{project}/databases/{database}``. + structured_pipeline (google.cloud.firestore_v1.types.StructuredPipeline): + A pipelined operation. + + This field is a member of `oneof`_ ``pipeline_type``. + transaction (bytes): + Run the query within an already active + transaction. + The value here is the opaque transaction ID to + execute the query in. + + This field is a member of `oneof`_ ``consistency_selector``. + new_transaction (google.cloud.firestore_v1.types.TransactionOptions): + Execute the pipeline in a new transaction. + + The identifier of the newly created transaction + will be returned in the first response on the + stream. This defaults to a read-only + transaction. + + This field is a member of `oneof`_ ``consistency_selector``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Execute the pipeline in a snapshot + transaction at the given time. + This must be a microsecond precision timestamp + within the past one hour, or if Point-in-Time + Recovery is enabled, can additionally be a whole + minute timestamp within the past 7 days. + + This field is a member of `oneof`_ ``consistency_selector``. 
+ """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + structured_pipeline: pipeline.StructuredPipeline = proto.Field( + proto.MESSAGE, + number=2, + oneof="pipeline_type", + message=pipeline.StructuredPipeline, + ) + transaction: bytes = proto.Field( + proto.BYTES, + number=5, + oneof="consistency_selector", + ) + new_transaction: common.TransactionOptions = proto.Field( + proto.MESSAGE, + number=6, + oneof="consistency_selector", + message=common.TransactionOptions, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + oneof="consistency_selector", + message=timestamp_pb2.Timestamp, + ) + + +class ExecutePipelineResponse(proto.Message): + r"""The response for [Firestore.Execute][]. + + Attributes: + transaction (bytes): + Newly created transaction identifier. + + This field is only specified as part of the first response + from the server, alongside the ``results`` field when the + original request specified + [ExecuteRequest.new_transaction][]. + results (MutableSequence[google.cloud.firestore_v1.types.Document]): + An ordered batch of results returned executing a pipeline. + + The batch size is variable, and can even be zero for when + only a partial progress message is returned. + + The fields present in the returned documents are only those + that were explicitly requested in the pipeline, this + includes those like + [``__name__``][google.firestore.v1.Document.name] and + [``__update_time__``][google.firestore.v1.Document.update_time]. + This is explicitly a divergence from ``Firestore.RunQuery`` + / ``Firestore.GetDocument`` RPCs which always return such + fields even when they are not specified in the + [``mask``][google.firestore.v1.DocumentMask]. + execution_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the results are valid. + + This is a (not strictly) monotonically increasing value + across multiple responses in the same stream. The API + guarantees that all previously returned results are still + valid at the latest ``execution_time``. This allows the API + consumer to treat the query if it ran at the latest + ``execution_time`` returned. + + If the query returns no results, a response with + ``execution_time`` and no ``results`` will be sent, and this + represents the time at which the operation was run. + explain_stats (google.cloud.firestore_v1.types.ExplainStats): + Query explain stats. + + This is present on the **last** response if the request + configured explain to run in 'analyze' or 'explain' mode in + the pipeline options. If the query does not return any + results, a response with ``explain_stats`` and no + ``results`` will still be sent. + """ + + transaction: bytes = proto.Field( + proto.BYTES, + number=1, + ) + results: MutableSequence[gf_document.Document] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gf_document.Document, + ) + execution_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + explain_stats: gf_explain_stats.ExplainStats = proto.Field( + proto.MESSAGE, + number=4, + message=gf_explain_stats.ExplainStats, + ) + + class RunAggregationQueryRequest(proto.Message): r"""The request for [Firestore.RunAggregationQuery][google.firestore.v1.Firestore.RunAggregationQuery]. @@ -1416,9 +1565,9 @@ class Target(proto.Message): Note that if the client sends multiple ``AddTarget`` requests without an ID, the order of IDs returned in - ``TargetChage.target_ids`` are undefined. 
-           should provide a target ID instead of relying on the server
-           to assign one.
+           ``TargetChange.target_ids`` is undefined. Therefore,
+           clients should provide a target ID instead of relying on the
+           server to assign one.
 
            If ``target_id`` is non-zero, there must not be an existing
            active target on this stream with the same ID.
diff --git a/google/cloud/firestore_v1/types/pipeline.py b/google/cloud/firestore_v1/types/pipeline.py
new file mode 100644
index 000000000..07688dda7
--- /dev/null
+++ b/google/cloud/firestore_v1/types/pipeline.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.firestore_v1.types import document
+
+
+__protobuf__ = proto.module(
+    package="google.firestore.v1",
+    manifest={
+        "StructuredPipeline",
+    },
+)
+
+
+class StructuredPipeline(proto.Message):
+    r"""A Firestore query represented as an ordered list of operations /
+    stages.
+
+    This is considered the top-level function which plans and executes a
+    query. It is logically equivalent to ``query(stages, options)``, but
+    prevents the client from having to build a function wrapper.
+
+    Attributes:
+        pipeline (google.cloud.firestore_v1.types.Pipeline):
+            Required. The pipeline query to execute.
+        options (MutableMapping[str, google.cloud.firestore_v1.types.Value]):
+            Optional. Optional query-level arguments.
+    """
+
+    pipeline: document.Pipeline = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=document.Pipeline,
+    )
+    options: MutableMapping[str, document.Value] = proto.MapField(
+        proto.STRING,
+        proto.MESSAGE,
+        number=2,
+        message=document.Value,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/firestore_v1/types/query.py b/google/cloud/firestore_v1/types/query.py
index c2856d0b4..d50742785 100644
--- a/google/cloud/firestore_v1/types/query.py
+++ b/google/cloud/firestore_v1/types/query.py
@@ -555,9 +555,9 @@ class FindNearest(proto.Message):
             when the vectors are more similar, the comparison is
             inverted.
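             For example, with ``EUCLIDEAN`` and a ``distance_threshold``
             of 0.5, only neighbors whose computed distance is at most
             0.5 are returned, per the comparisons below.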
- - For EUCLIDEAN, COSINE: WHERE distance <= - distance_threshold - - For DOT_PRODUCT: WHERE distance >= distance_threshold + - For EUCLIDEAN, COSINE: + ``WHERE distance <= distance_threshold`` + - For DOT_PRODUCT: ``WHERE distance >= distance_threshold`` """ class DistanceMeasure(proto.Enum): diff --git a/tests/unit/gapic/firestore_v1/test_firestore.py b/tests/unit/gapic/firestore_v1/test_firestore.py index c5994da87..e3821e772 100644 --- a/tests/unit/gapic/firestore_v1/test_firestore.py +++ b/tests/unit/gapic/firestore_v1/test_firestore.py @@ -61,7 +61,9 @@ from google.cloud.firestore_v1.types import common from google.cloud.firestore_v1.types import document from google.cloud.firestore_v1.types import document as gf_document +from google.cloud.firestore_v1.types import explain_stats from google.cloud.firestore_v1.types import firestore +from google.cloud.firestore_v1.types import pipeline from google.cloud.firestore_v1.types import query from google.cloud.firestore_v1.types import query_profile from google.cloud.firestore_v1.types import write as gf_write @@ -4080,6 +4082,185 @@ async def test_run_query_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + firestore.ExecutePipelineRequest, + dict, + ], +) +def test_execute_pipeline(request_type, transport: str = "grpc"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([firestore.ExecutePipelineResponse()]) + response = client.execute_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = firestore.ExecutePipelineRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, firestore.ExecutePipelineResponse) + + +def test_execute_pipeline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = firestore.ExecutePipelineRequest( + database="database_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.execute_pipeline(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == firestore.ExecutePipelineRequest( + database="database_value", + ) + + +def test_execute_pipeline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_pipeline in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.execute_pipeline + ] = mock_rpc + request = {} + client.execute_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.execute_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_execute_pipeline_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.execute_pipeline + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.execute_pipeline + ] = mock_rpc + + request = {} + await client.execute_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.execute_pipeline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_execute_pipeline_async( + transport: str = "grpc_asyncio", request_type=firestore.ExecutePipelineRequest +): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.ExecutePipelineResponse()] + ) + response = await client.execute_pipeline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = firestore.ExecutePipelineRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, firestore.ExecutePipelineResponse) + + +@pytest.mark.asyncio +async def test_execute_pipeline_async_from_dict(): + await test_execute_pipeline_async(request_type=dict) + + @pytest.mark.parametrize( "request_type", [ @@ -7606,7 +7787,7 @@ def test_run_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): +def test_execute_pipeline_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7620,10 +7801,7 @@ def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.run_aggregation_query - in client._transport._wrapped_methods - ) + assert client._transport.execute_pipeline in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7631,29 +7809,29 @@ def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_aggregation_query + client._transport.execute_pipeline ] = mock_rpc request = {} - client.run_aggregation_query(request) + client.execute_pipeline(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_aggregation_query(request) + client.execute_pipeline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_run_aggregation_query_rest_required_fields( - request_type=firestore.RunAggregationQueryRequest, +def test_execute_pipeline_rest_required_fields( + request_type=firestore.ExecutePipelineRequest, ): transport_class = transports.FirestoreRestTransport request_init = {} - request_init["parent"] = "" + request_init["database"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7664,21 +7842,21 @@ def test_run_aggregation_query_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) + ).execute_pipeline._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["database"] = "database_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).run_aggregation_query._get_unset_required_fields(jsonified_request) + ).execute_pipeline._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "database" in jsonified_request + assert jsonified_request["database"] == "database_value" client = FirestoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7687,7 +7865,7 @@ def test_run_aggregation_query_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore.RunAggregationQueryResponse() + return_value = firestore.ExecutePipelineResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7709,7 +7887,7 @@ def test_run_aggregation_query_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = firestore.RunAggregationQueryResponse.pb(return_value) + return_value = firestore.ExecutePipelineResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -7719,23 +7897,23 @@ def test_run_aggregation_query_rest_required_fields( with mock.patch.object(response_value, "iter_content") as iter_content: iter_content.return_value = iter(json_return_value) - response = client.run_aggregation_query(request) + response = client.execute_pipeline(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_run_aggregation_query_rest_unset_required_fields(): +def test_execute_pipeline_rest_unset_required_fields(): transport = transports.FirestoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.execute_pipeline._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database",))) -def test_partition_query_rest_use_cached_wrapped_rpc(): +def test_run_aggregation_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7749,30 +7927,35 @@ def test_partition_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods + assert ( + client._transport.run_aggregation_query + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc + client._transport._wrapped_methods[ + client._transport.run_aggregation_query + ] = mock_rpc request = {} - client.partition_query(request) + client.run_aggregation_query(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.partition_query(request) + client.run_aggregation_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_partition_query_rest_required_fields( - request_type=firestore.PartitionQueryRequest, +def test_run_aggregation_query_rest_required_fields( + request_type=firestore.RunAggregationQueryRequest, ): transport_class = transports.FirestoreRestTransport @@ -7788,7 +7971,7 @@ def test_partition_query_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) + ).run_aggregation_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7797,7 +7980,7 @@ def test_partition_query_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).partition_query._get_unset_required_fields(jsonified_request) + ).run_aggregation_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7811,7 +7994,7 @@ def test_partition_query_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = firestore.PartitionQueryResponse() + return_value = firestore.RunAggregationQueryResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7833,68 +8016,192 @@ def test_partition_query_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = firestore.PartitionQueryResponse.pb(return_value) + return_value = firestore.RunAggregationQueryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.partition_query(request) + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.run_aggregation_query(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_partition_query_rest_unset_required_fields(): +def test_run_aggregation_query_rest_unset_required_fields(): transport = transports.FirestoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.partition_query._get_unset_required_fields({}) + unset_fields = transport.run_aggregation_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent",))) -def test_partition_query_rest_pager(transport: str = "rest"): - client = FirestoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - query.Cursor(), - ], - next_page_token="abc", - ), - firestore.PartitionQueryResponse( - partitions=[], - next_page_token="def", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - ], - next_page_token="ghi", - ), - firestore.PartitionQueryResponse( - partitions=[ - query.Cursor(), - query.Cursor(), - ], - ), +def test_partition_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Two responses for two calls - response = response + response + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc + + request = {} + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.partition_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_partition_query_rest_required_fields( + request_type=firestore.PartitionQueryRequest, +): + transport_class = transports.FirestoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = firestore.PartitionQueryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.PartitionQueryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.partition_query(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_partition_query_rest_unset_required_fields(): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.partition_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +def test_partition_query_rest_pager(transport: str = "rest"): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + query.Cursor(), + ], + next_page_token="abc", + ), + firestore.PartitionQueryResponse( + partitions=[], + next_page_token="def", + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + ], + next_page_token="ghi", + ), + firestore.PartitionQueryResponse( + partitions=[ + query.Cursor(), + query.Cursor(), + ], + ), + ) + # Two responses for two calls + response = response + response # Wrap the values into proper Response objs response = tuple(firestore.PartitionQueryResponse.to_json(x) for x in response) @@ -8749,6 +9056,27 @@ def test_run_query_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_pipeline_empty_call_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + call.return_value = iter([firestore.ExecutePipelineResponse()]) + client.execute_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
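# (request=None still builds a default request message, so the stub below
# should receive an empty RunAggregationQueryRequest.)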
def test_run_aggregation_query_empty_call_grpc(): @@ -8858,6 +9186,60 @@ def test_create_document_empty_call_grpc(): assert args[0] == request_msg +def test_execute_pipeline_routing_parameters_request_1_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + call.return_value = iter([firestore.ExecutePipelineResponse()]) + client.execute_pipeline(request={"database": "projects/sample1/sample2"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/sample2"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_execute_pipeline_routing_parameters_request_2_grpc(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + call.return_value = iter([firestore.ExecutePipelineResponse()]) + client.execute_pipeline( + request={"database": "projects/sample1/databases/sample2/sample3"} + ) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/databases/sample2/sample3"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1", "database_id": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_transport_kind_grpc_asyncio(): transport = FirestoreAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -9107,6 +9489,32 @@ async def test_run_query_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_pipeline_empty_call_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.ExecutePipelineResponse()] + ) + await client.execute_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
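# (The grpc_asyncio variants repeat the same default-request check against the
# async transport.)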
@pytest.mark.asyncio @@ -9244,6 +9652,70 @@ async def test_create_document_empty_call_grpc_asyncio(): assert args[0] == request_msg +@pytest.mark.asyncio +async def test_execute_pipeline_routing_parameters_request_1_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.ExecutePipelineResponse()] + ) + await client.execute_pipeline(request={"database": "projects/sample1/sample2"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/sample2"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +@pytest.mark.asyncio +async def test_execute_pipeline_routing_parameters_request_2_grpc_asyncio(): + client = FirestoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[firestore.ExecutePipelineResponse()] + ) + await client.execute_pipeline( + request={"database": "projects/sample1/databases/sample2/sample3"} + ) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/databases/sample2/sample3"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1", "database_id": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_transport_kind_rest(): transport = FirestoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -10429,6 +10901,137 @@ def test_run_query_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_execute_pipeline_rest_bad_request( + request_type=firestore.ExecutePipelineRequest, +): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
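+    # The REST transport surfaces the 400 status as
+    # google.api_core.exceptions.BadRequest, so the call is made inside
+    # pytest.raises rather than asserted on a return value.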
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.execute_pipeline(request) + + +@pytest.mark.parametrize( + "request_type", + [ + firestore.ExecutePipelineRequest, + dict, + ], +) +def test_execute_pipeline_rest_call_success(request_type): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"database": "projects/sample1/databases/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = firestore.ExecutePipelineResponse( + transaction=b"transaction_blob", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = firestore.ExecutePipelineResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.execute_pipeline(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
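+    # (ExecutePipeline is server-streaming over REST, which is why the JSON
+    # payload above is wrapped in a list and served via iter_content; the
+    # client returns an iterable that is advanced with next() before these
+    # assertions.)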
+ assert isinstance(response, firestore.ExecutePipelineResponse) + assert response.transaction == b"transaction_blob" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_pipeline_rest_interceptors(null_interceptor): + transport = transports.FirestoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.FirestoreRestInterceptor(), + ) + client = FirestoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FirestoreRestInterceptor, "post_execute_pipeline" + ) as post, mock.patch.object( + transports.FirestoreRestInterceptor, "post_execute_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.FirestoreRestInterceptor, "pre_execute_pipeline" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = firestore.ExecutePipelineRequest.pb( + firestore.ExecutePipelineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = firestore.ExecutePipelineResponse.to_json( + firestore.ExecutePipelineResponse() + ) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = firestore.ExecutePipelineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = firestore.ExecutePipelineResponse() + post_with_metadata.return_value = firestore.ExecutePipelineResponse(), metadata + + client.execute_pipeline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_run_aggregation_query_rest_bad_request( request_type=firestore.RunAggregationQueryRequest, ): @@ -11605,6 +12208,26 @@ def test_run_query_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_pipeline_empty_call_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + client.execute_pipeline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_run_aggregation_query_empty_call_rest(): @@ -11709,6 +12332,58 @@ def test_create_document_empty_call_rest(): assert args[0] == request_msg +def test_execute_pipeline_routing_parameters_request_1_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
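+    # The method's routing annotation extracts project_id (and database_id
+    # when the path matches projects/*/databases/*) from request.database and
+    # emits them as x-goog-request-params metadata, which expected_headers
+    # verifies below.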
+ with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + client.execute_pipeline(request={"database": "projects/sample1/sample2"}) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/sample2"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + +def test_execute_pipeline_routing_parameters_request_2_rest(): + client = FirestoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.execute_pipeline), "__call__") as call: + client.execute_pipeline( + request={"database": "projects/sample1/databases/sample2/sample3"} + ) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = firestore.ExecutePipelineRequest( + **{"database": "projects/sample1/databases/sample2/sample3"} + ) + + assert args[0] == request_msg + + expected_headers = {"project_id": "sample1", "database_id": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = FirestoreClient( @@ -11751,6 +12426,7 @@ def test_firestore_base_transport(): "commit", "rollback", "run_query", + "execute_pipeline", "run_aggregation_query", "partition_query", "write", @@ -12056,6 +12732,9 @@ def test_firestore_client_transport_session_collision(transport_name): session1 = client1.transport.run_query._session session2 = client2.transport.run_query._session assert session1 != session2 + session1 = client1.transport.execute_pipeline._session + session2 = client2.transport.execute_pipeline._session + assert session1 != session2 session1 = client1.transport.run_aggregation_query._session session2 = client2.transport.run_aggregation_query._session assert session1 != session2
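A minimal standalone sketch (test name hypothetical) of the per-client session
property the assertions above rely on: two REST clients must not share the
HTTP session behind a wrapped method such as execute_pipeline.

from google.auth import credentials as ga_credentials

from google.cloud.firestore_v1.services.firestore import FirestoreClient


def test_execute_pipeline_session_is_per_client_sketch():
    # Each REST transport builds its own authorized session, so the same
    # wrapped method on two independently constructed clients resolves to
    # two distinct sessions.
    client1 = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    client2 = FirestoreClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    assert (
        client1.transport.execute_pipeline._session
        != client2.transport.execute_pipeline._session
    )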