From bf17e87daba41e39c3659f1efb71504910e27f20 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 5 Mar 2019 15:23:57 -0500 Subject: [PATCH 1/7] Update synth to generate 'v1' GAPIC libs. --- .../cloud/firestore_v1/gapic/__init__.py | 0 .../google/cloud/firestore_v1/gapic/enums.py | 147 + .../firestore_v1/gapic/firestore_client.py | 1326 ++++++ .../gapic/firestore_client_config.py | 97 + .../firestore_v1/gapic/transports/__init__.py | 0 .../transports/firestore_grpc_transport.py | 269 ++ .../cloud/firestore_v1/proto/__init__.py | 0 .../cloud/firestore_v1/proto/common.proto | 85 + .../cloud/firestore_v1/proto/common_pb2.py | 450 ++ .../firestore_v1/proto/common_pb2_grpc.py | 2 + .../cloud/firestore_v1/proto/document.proto | 150 + .../cloud/firestore_v1/proto/document_pb2.py | 797 ++++ .../firestore_v1/proto/document_pb2_grpc.py | 2 + .../cloud/firestore_v1/proto/firestore.proto | 782 ++++ .../cloud/firestore_v1/proto/firestore_pb2.py | 3783 +++++++++++++++++ .../firestore_v1/proto/firestore_pb2_grpc.py | 294 ++ .../cloud/firestore_v1/proto/query.proto | 235 + .../cloud/firestore_v1/proto/query_pb2.py | 1186 ++++++ .../firestore_v1/proto/query_pb2_grpc.py | 2 + .../cloud/firestore_v1/proto/write.proto | 263 ++ .../cloud/firestore_v1/proto/write_pb2.py | 1144 +++++ .../firestore_v1/proto/write_pb2_grpc.py | 2 + firestore/synth.metadata | 12 +- firestore/synth.py | 51 +- .../unit/gapic/v1/test_firestore_client_v1.py | 645 +++ 25 files changed, 11700 insertions(+), 24 deletions(-) create mode 100644 firestore/google/cloud/firestore_v1/gapic/__init__.py create mode 100644 firestore/google/cloud/firestore_v1/gapic/enums.py create mode 100644 firestore/google/cloud/firestore_v1/gapic/firestore_client.py create mode 100644 firestore/google/cloud/firestore_v1/gapic/firestore_client_config.py create mode 100644 firestore/google/cloud/firestore_v1/gapic/transports/__init__.py create mode 100644 firestore/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py create mode 100644 firestore/google/cloud/firestore_v1/proto/__init__.py create mode 100644 firestore/google/cloud/firestore_v1/proto/common.proto create mode 100644 firestore/google/cloud/firestore_v1/proto/common_pb2.py create mode 100644 firestore/google/cloud/firestore_v1/proto/common_pb2_grpc.py create mode 100644 firestore/google/cloud/firestore_v1/proto/document.proto create mode 100644 firestore/google/cloud/firestore_v1/proto/document_pb2.py create mode 100644 firestore/google/cloud/firestore_v1/proto/document_pb2_grpc.py create mode 100644 firestore/google/cloud/firestore_v1/proto/firestore.proto create mode 100644 firestore/google/cloud/firestore_v1/proto/firestore_pb2.py create mode 100644 firestore/google/cloud/firestore_v1/proto/firestore_pb2_grpc.py create mode 100644 firestore/google/cloud/firestore_v1/proto/query.proto create mode 100644 firestore/google/cloud/firestore_v1/proto/query_pb2.py create mode 100644 firestore/google/cloud/firestore_v1/proto/query_pb2_grpc.py create mode 100644 firestore/google/cloud/firestore_v1/proto/write.proto create mode 100644 firestore/google/cloud/firestore_v1/proto/write_pb2.py create mode 100644 firestore/google/cloud/firestore_v1/proto/write_pb2_grpc.py create mode 100644 firestore/tests/unit/gapic/v1/test_firestore_client_v1.py diff --git a/firestore/google/cloud/firestore_v1/gapic/__init__.py b/firestore/google/cloud/firestore_v1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/firestore/google/cloud/firestore_v1/gapic/enums.py 
b/firestore/google/cloud/firestore_v1/gapic/enums.py
new file mode 100644
index 000000000000..2c969322a5a9
--- /dev/null
+++ b/firestore/google/cloud/firestore_v1/gapic/enums.py
@@ -0,0 +1,147 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class NullValue(enum.IntEnum):
+    """
+    ``NullValue`` is a singleton enumeration to represent the null value for
+    the ``Value`` type union.
+
+    The JSON representation for ``NullValue`` is JSON ``null``.
+
+    Attributes:
+      NULL_VALUE (int): Null value.
+    """
+
+    NULL_VALUE = 0
+
+
+class DocumentTransform(object):
+    class FieldTransform(object):
+        class ServerValue(enum.IntEnum):
+            """
+            A value that is calculated by the server.
+
+            Attributes:
+              SERVER_VALUE_UNSPECIFIED (int): Unspecified. This value must not be used.
+              REQUEST_TIME (int): The time at which the server processed the request, with millisecond
+              precision.
+            """
+
+            SERVER_VALUE_UNSPECIFIED = 0
+            REQUEST_TIME = 1
+
+
+class StructuredQuery(object):
+    class Direction(enum.IntEnum):
+        """
+        A sort direction.
+
+        Attributes:
+          DIRECTION_UNSPECIFIED (int): Unspecified.
+          ASCENDING (int): Ascending.
+          DESCENDING (int): Descending.
+        """
+
+        DIRECTION_UNSPECIFIED = 0
+        ASCENDING = 1
+        DESCENDING = 2
+
+    class CompositeFilter(object):
+        class Operator(enum.IntEnum):
+            """
+            A composite filter operator.
+
+            Attributes:
+              OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+              AND (int): The results are required to satisfy each of the combined filters.
+            """
+
+            OPERATOR_UNSPECIFIED = 0
+            AND = 1
+
+    class FieldFilter(object):
+        class Operator(enum.IntEnum):
+            """
+            A field filter operator.
+
+            Attributes:
+              OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+              LESS_THAN (int): Less than. Requires that the field come first in ``order_by``.
+              LESS_THAN_OR_EQUAL (int): Less than or equal. Requires that the field come first in ``order_by``.
+              GREATER_THAN (int): Greater than. Requires that the field come first in ``order_by``.
+              GREATER_THAN_OR_EQUAL (int): Greater than or equal. Requires that the field come first in
+              ``order_by``.
+              EQUAL (int): Equal.
+              ARRAY_CONTAINS (int): Contains. Requires that the field is an array.
+            """
+
+            OPERATOR_UNSPECIFIED = 0
+            LESS_THAN = 1
+            LESS_THAN_OR_EQUAL = 2
+            GREATER_THAN = 3
+            GREATER_THAN_OR_EQUAL = 4
+            EQUAL = 5
+            ARRAY_CONTAINS = 7
+
+    class UnaryFilter(object):
+        class Operator(enum.IntEnum):
+            """
+            A unary operator.
+
+            Attributes:
+              OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used.
+              IS_NAN (int): Test if a field is equal to NaN.
+              IS_NULL (int): Test if an expression evaluates to Null.
+            """
+
+            OPERATOR_UNSPECIFIED = 0
+            IS_NAN = 2
+            IS_NULL = 3
+
+
+class TargetChange(object):
+    class TargetChangeType(enum.IntEnum):
+        """
+        The type of change.
+
+        Attributes:
+          NO_CHANGE (int): No change has occurred. Used only to send an updated ``resume_token``.
+ ADD (int): The targets have been added. + REMOVE (int): The targets have been removed. + CURRENT (int): The targets reflect all changes committed before the targets were added + to the stream. + + This will be sent after or with a ``read_time`` that is greater than or + equal to the time at which the targets were added. + + Listeners can wait for this change if read-after-write semantics are + desired. + RESET (int): The targets have been reset, and a new initial state for the targets + will be returned in subsequent changes. + + After the initial state is complete, ``CURRENT`` will be returned even + if the target was previously indicated to be ``CURRENT``. + """ + + NO_CHANGE = 0 + ADD = 1 + REMOVE = 2 + CURRENT = 3 + RESET = 4 diff --git a/firestore/google/cloud/firestore_v1/gapic/firestore_client.py b/firestore/google/cloud/firestore_v1/gapic/firestore_client.py new file mode 100644 index 000000000000..deee20ea3960 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/gapic/firestore_client.py @@ -0,0 +1,1326 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Accesses the google.firestore.v1 Firestore API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import google.api_core.protobuf_helpers +import grpc + +from google.cloud.firestore_v1.gapic import enums +from google.cloud.firestore_v1.gapic import firestore_client_config +from google.cloud.firestore_v1.gapic.transports import firestore_grpc_transport +from google.cloud.firestore_v1.proto import common_pb2 +from google.cloud.firestore_v1.proto import document_pb2 +from google.cloud.firestore_v1.proto import firestore_pb2 +from google.cloud.firestore_v1.proto import firestore_pb2_grpc +from google.cloud.firestore_v1.proto import query_pb2 +from google.cloud.firestore_v1.proto import write_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import timestamp_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-firestore" +).version + + +class FirestoreClient(object): + """ + The Cloud Firestore service. + + This service exposes several types of comparable timestamps: + + - ``create_time`` - The time at which a document was created. Changes + only when a document is deleted, then re-created. Increases in a + strict monotonic fashion. + - ``update_time`` - The time at which a document was last updated. + Changes every time a document is modified. Does not change when a + write results in no modifications. Increases in a strict monotonic + fashion. + - ``read_time`` - The time at which a particular state was observed. + Used to denote a consistent snapshot of the database or the time at + which a Document was observed to not exist. 
+ - ``commit_time`` - The time at which the writes in a transaction were + committed. Any read with an equal or greater ``read_time`` is + guaranteed to see the effects of the transaction. + """ + + SERVICE_ADDRESS = "firestore.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.firestore.v1.Firestore" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FirestoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def database_root_path(cls, project, database): + """Return a fully-qualified database_root string.""" + return google.api_core.path_template.expand( + "projects/{project}/databases/{database}", + project=project, + database=database, + ) + + @classmethod + def document_root_path(cls, project, database): + """Return a fully-qualified document_root string.""" + return google.api_core.path_template.expand( + "projects/{project}/databases/{database}/documents", + project=project, + database=database, + ) + + @classmethod + def document_path_path(cls, project, database, document_path): + """Return a fully-qualified document_path string.""" + return google.api_core.path_template.expand( + "projects/{project}/databases/{database}/documents/{document_path=**}", + project=project, + database=database, + document_path=document_path, + ) + + @classmethod + def any_path_path(cls, project, database, document, any_path): + """Return a fully-qualified any_path string.""" + return google.api_core.path_template.expand( + "projects/{project}/databases/{database}/documents/{document}/{any_path=**}", + project=project, + database=database, + document=document, + any_path=any_path, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): + """Constructor. + + Args: + transport (Union[~.FirestoreGrpcTransport, + Callable[[~.Credentials, type], ~.FirestoreGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. 
A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = firestore_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=firestore_grpc_transport.FirestoreGrpcTransport, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = firestore_grpc_transport.FirestoreGrpcTransport( + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def get_document( + self, + name, + mask=None, + transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a single document. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> response = client.get_document(name) + + Args: + name (str): The resource name of the Document to get. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields. + + If the document has a field that is not present in this mask, that field + will not be returned in the response. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + transaction (bytes): Reads the document in a transaction. + read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads the version of the document at the given time. + This may not be older than 60 seconds. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Timestamp` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.firestore_v1.types.Document` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_document" not in self._inner_api_calls: + self._inner_api_calls[ + "get_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_document, + default_retry=self._method_configs["GetDocument"].retry, + default_timeout=self._method_configs["GetDocument"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + transaction=transaction, read_time=read_time + ) + + request = firestore_pb2.GetDocumentRequest( + name=name, mask=mask, transaction=transaction, read_time=read_time + ) + return self._inner_api_calls["get_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_documents( + self, + parent, + collection_id, + page_size=None, + order_by=None, + mask=None, + transaction=None, + read_time=None, + show_missing=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists documents. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> # TODO: Initialize `collection_id`: + >>> collection_id = '' + >>> + >>> # Iterate over all results + >>> for element in client.list_documents(parent, collection_id): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_documents(parent, collection_id).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + collection_id (str): The collection ID, relative to ``parent``, to list. For example: + ``chatrooms`` or ``messages``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + order_by (str): The order to sort results by. 
For example: ``priority desc, name``. + mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields. + + If a document has a field that is not present in this mask, that field + will not be returned in the response. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + transaction (bytes): Reads documents in a transaction. + read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time. + This may not be older than 60 seconds. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Timestamp` + show_missing (bool): If the list should show missing documents. A missing document is a + document that does not exist but has sub-documents. These documents will + be returned with a key but will not have fields, + ``Document.create_time``, or ``Document.update_time`` set. + + Requests with ``show_missing`` may not specify ``where`` or + ``order_by``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.firestore_v1.types.Document` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_documents" not in self._inner_api_calls: + self._inner_api_calls[ + "list_documents" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_documents, + default_retry=self._method_configs["ListDocuments"].retry, + default_timeout=self._method_configs["ListDocuments"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. 
+ google.api_core.protobuf_helpers.check_oneof( + transaction=transaction, read_time=read_time + ) + + request = firestore_pb2.ListDocumentsRequest( + parent=parent, + collection_id=collection_id, + page_size=page_size, + order_by=order_by, + mask=mask, + transaction=transaction, + read_time=read_time, + show_missing=show_missing, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_documents"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="documents", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def create_document( + self, + parent, + collection_id, + document_id, + document, + mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a new document. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> # TODO: Initialize `collection_id`: + >>> collection_id = '' + >>> + >>> # TODO: Initialize `document_id`: + >>> document_id = '' + >>> + >>> # TODO: Initialize `document`: + >>> document = {} + >>> + >>> response = client.create_document(parent, collection_id, document_id, document) + + Args: + parent (str): The parent resource. For example: + ``projects/{project_id}/databases/{database_id}/documents`` or + ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`` + collection_id (str): The collection ID, relative to ``parent``, to list. For example: + ``chatrooms``. + document_id (str): The client-assigned document ID to use for this document. + + Optional. If not specified, an ID will be assigned by the service. + document (Union[dict, ~google.cloud.firestore_v1.types.Document]): The document to create. ``name`` must not be set. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Document` + mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields. + + If the document has a field that is not present in this mask, that field + will not be returned in the response. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.firestore_v1.types.Document` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_document" not in self._inner_api_calls: + self._inner_api_calls[ + "create_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_document, + default_retry=self._method_configs["CreateDocument"].retry, + default_timeout=self._method_configs["CreateDocument"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.CreateDocumentRequest( + parent=parent, + collection_id=collection_id, + document_id=document_id, + document=document, + mask=mask, + ) + return self._inner_api_calls["create_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_document( + self, + document, + update_mask, + mask=None, + current_document=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates or inserts a document. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> # TODO: Initialize `document`: + >>> document = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_document(document, update_mask) + + Args: + document (Union[dict, ~google.cloud.firestore_v1.types.Document]): The updated document. + Creates the document if it does not already exist. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Document` + update_mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to update. + None of the field paths in the mask may contain a reserved name. + + If the document exists on the server and has fields not referenced in the + mask, they are left unchanged. + Fields referenced in the mask, but not present in the input document, are + deleted from the document on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields. + + If the document has a field that is not present in this mask, that field + will not be returned in the response. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document. + The request will fail if this is set and not met by the target document. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Precondition` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.firestore_v1.types.Document` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + # Wrap the transport method to add retry and timeout logic. + if "update_document" not in self._inner_api_calls: + self._inner_api_calls[ + "update_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_document, + default_retry=self._method_configs["UpdateDocument"].retry, + default_timeout=self._method_configs["UpdateDocument"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.UpdateDocumentRequest( + document=document, + update_mask=update_mask, + mask=mask, + current_document=current_document, + ) + return self._inner_api_calls["update_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_document( + self, + name, + current_document=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a document. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> name = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> client.delete_document(name) + + Args: + name (str): The resource name of the Document to delete. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + current_document (Union[dict, ~google.cloud.firestore_v1.types.Precondition]): An optional precondition on the document. + The request will fail if this is set and not met by the target document. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Precondition` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_document" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_document" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_document, + default_retry=self._method_configs["DeleteDocument"].retry, + default_timeout=self._method_configs["DeleteDocument"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.DeleteDocumentRequest( + name=name, current_document=current_document + ) + self._inner_api_calls["delete_document"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def batch_get_documents( + self, + database, + documents, + mask=None, + transaction=None, + new_transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets multiple documents. + + Documents returned by this method are not guaranteed to be returned in the + same order that they were requested. 
+ + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> + >>> # TODO: Initialize `documents`: + >>> documents = [] + >>> + >>> for element in client.batch_get_documents(database, documents): + ... # process element + ... pass + + Args: + database (str): The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + documents (list[str]): The names of the documents to retrieve. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + The request will fail if any of the document is not a child resource of + the given ``database``. Duplicate names will be elided. + mask (Union[dict, ~google.cloud.firestore_v1.types.DocumentMask]): The fields to return. If not set, returns all fields. + + If a document has a field that is not present in this mask, that field will + not be returned in the response. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.DocumentMask` + transaction (bytes): Reads documents in a transaction. + new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents. + Defaults to a read-only transaction. + The new transaction ID will be returned as the first response in the + stream. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.TransactionOptions` + read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time. + This may not be older than 60 seconds. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Timestamp` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + Iterable[~google.cloud.firestore_v1.types.BatchGetDocumentsResponse]. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "batch_get_documents" not in self._inner_api_calls: + self._inner_api_calls[ + "batch_get_documents" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.batch_get_documents, + default_retry=self._method_configs["BatchGetDocuments"].retry, + default_timeout=self._method_configs["BatchGetDocuments"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. 
+ google.api_core.protobuf_helpers.check_oneof( + transaction=transaction, + new_transaction=new_transaction, + read_time=read_time, + ) + + request = firestore_pb2.BatchGetDocumentsRequest( + database=database, + documents=documents, + mask=mask, + transaction=transaction, + new_transaction=new_transaction, + read_time=read_time, + ) + return self._inner_api_calls["batch_get_documents"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def begin_transaction( + self, + database, + options_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Starts a new transaction. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> + >>> response = client.begin_transaction(database) + + Args: + database (str): The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + options_ (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): The options for the transaction. + Defaults to a read-write transaction. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.TransactionOptions` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.firestore_v1.types.BeginTransactionResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "begin_transaction" not in self._inner_api_calls: + self._inner_api_calls[ + "begin_transaction" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.begin_transaction, + default_retry=self._method_configs["BeginTransaction"].retry, + default_timeout=self._method_configs["BeginTransaction"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.BeginTransactionRequest( + database=database, options=options_ + ) + return self._inner_api_calls["begin_transaction"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def commit( + self, + database, + writes, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Commits a transaction, while optionally updating documents. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> + >>> # TODO: Initialize `writes`: + >>> writes = [] + >>> + >>> response = client.commit(database, writes) + + Args: + database (str): The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + writes (list[Union[dict, ~google.cloud.firestore_v1.types.Write]]): The writes to apply. 
+ + Always executed atomically and in order. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Write` + transaction (bytes): If set, applies all writes in this transaction, and commits it. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.firestore_v1.types.CommitResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "commit" not in self._inner_api_calls: + self._inner_api_calls[ + "commit" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.commit, + default_retry=self._method_configs["Commit"].retry, + default_timeout=self._method_configs["Commit"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.CommitRequest( + database=database, writes=writes, transaction=transaction + ) + return self._inner_api_calls["commit"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def rollback( + self, + database, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Rolls back a transaction. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> + >>> # TODO: Initialize `transaction`: + >>> transaction = b'' + >>> + >>> client.rollback(database, transaction) + + Args: + database (str): The database name. In the format: + ``projects/{project_id}/databases/{database_id}``. + transaction (bytes): The transaction to roll back. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "rollback" not in self._inner_api_calls: + self._inner_api_calls[ + "rollback" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.rollback, + default_retry=self._method_configs["Rollback"].retry, + default_timeout=self._method_configs["Rollback"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.RollbackRequest( + database=database, transaction=transaction + ) + self._inner_api_calls["rollback"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def run_query( + self, + parent, + structured_query=None, + transaction=None, + new_transaction=None, + read_time=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Runs a query. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> for element in client.run_query(parent): + ... # process element + ... pass + + Args: + parent (str): The parent resource name. In the format: + ``projects/{project_id}/databases/{database_id}/documents`` or + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: ``projects/my-project/databases/my-database/documents`` or + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + structured_query (Union[dict, ~google.cloud.firestore_v1.types.StructuredQuery]): A structured query. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.StructuredQuery` + transaction (bytes): Reads documents in a transaction. + new_transaction (Union[dict, ~google.cloud.firestore_v1.types.TransactionOptions]): Starts a new transaction and reads the documents. + Defaults to a read-only transaction. + The new transaction ID will be returned as the first response in the + stream. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.TransactionOptions` + read_time (Union[dict, ~google.cloud.firestore_v1.types.Timestamp]): Reads documents as they were at the given time. + This may not be older than 60 seconds. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.firestore_v1.types.Timestamp` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + Iterable[~google.cloud.firestore_v1.types.RunQueryResponse]. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "run_query" not in self._inner_api_calls: + self._inner_api_calls[ + "run_query" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_query, + default_retry=self._method_configs["RunQuery"].retry, + default_timeout=self._method_configs["RunQuery"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof(structured_query=structured_query) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + transaction=transaction, + new_transaction=new_transaction, + read_time=read_time, + ) + + request = firestore_pb2.RunQueryRequest( + parent=parent, + structured_query=structured_query, + transaction=transaction, + new_transaction=new_transaction, + read_time=read_time, + ) + return self._inner_api_calls["run_query"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def write( + self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Streams batches of document updates and deletes, in order. + + EXPERIMENTAL: This method interface might change in the future. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> request = {'database': database} + >>> + >>> requests = [request] + >>> for element in client.write(requests): + ... # process element + ... pass + + Args: + requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.WriteRequest]): The input objects. If a dict is provided, it must be of the + same form as the protobuf message :class:`~google.cloud.firestore_v1.types.WriteRequest` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + Iterable[~google.cloud.firestore_v1.types.WriteResponse]. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "write" not in self._inner_api_calls: + self._inner_api_calls[ + "write" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.write, + default_retry=self._method_configs["Write"].retry, + default_timeout=self._method_configs["Write"].timeout, + client_info=self._client_info, + ) + + return self._inner_api_calls["write"]( + requests, retry=retry, timeout=timeout, metadata=metadata + ) + + def listen( + self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Listens to changes. + + EXPERIMENTAL: This method interface might change in the future. 
+ + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> database = client.database_root_path('[PROJECT]', '[DATABASE]') + >>> request = {'database': database} + >>> + >>> requests = [request] + >>> for element in client.listen(requests): + ... # process element + ... pass + + Args: + requests (iterator[dict|google.cloud.firestore_v1.proto.firestore_pb2.ListenRequest]): The input objects. If a dict is provided, it must be of the + same form as the protobuf message :class:`~google.cloud.firestore_v1.types.ListenRequest` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + Iterable[~google.cloud.firestore_v1.types.ListenResponse]. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "listen" not in self._inner_api_calls: + self._inner_api_calls[ + "listen" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.listen, + default_retry=self._method_configs["Listen"].retry, + default_timeout=self._method_configs["Listen"].timeout, + client_info=self._client_info, + ) + + return self._inner_api_calls["listen"]( + requests, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_collection_ids( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists all the collection IDs underneath a document. + + Example: + >>> from google.cloud import firestore_v1 + >>> + >>> client = firestore_v1.FirestoreClient() + >>> + >>> parent = client.any_path_path('[PROJECT]', '[DATABASE]', '[DOCUMENT]', '[ANY_PATH]') + >>> + >>> # Iterate over all results + >>> for element in client.list_collection_ids(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_collection_ids(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The parent document. In the format: + ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. + For example: + ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`str` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_collection_ids" not in self._inner_api_calls: + self._inner_api_calls[ + "list_collection_ids" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_collection_ids, + default_retry=self._method_configs["ListCollectionIds"].retry, + default_timeout=self._method_configs["ListCollectionIds"].timeout, + client_info=self._client_info, + ) + + request = firestore_pb2.ListCollectionIdsRequest( + parent=parent, page_size=page_size + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_collection_ids"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="collection_ids", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/firestore/google/cloud/firestore_v1/gapic/firestore_client_config.py b/firestore/google/cloud/firestore_v1/gapic/firestore_client_config.py new file mode 100644 index 000000000000..126dfb22d2ab --- /dev/null +++ b/firestore/google/cloud/firestore_v1/gapic/firestore_client_config.py @@ -0,0 +1,97 @@ +config = { + "interfaces": { + "google.firestore.v1.Firestore": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + }, + "streaming": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 300000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 300000, + "total_timeout_millis": 600000, + }, + }, + "methods": { + "GetDocument": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListDocuments": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "CreateDocument": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateDocument": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "DeleteDocument": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "BatchGetDocuments": { + "timeout_millis": 300000, + "retry_codes_name": "idempotent", + "retry_params_name": "streaming", + }, + "BeginTransaction": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "Commit": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + 
"Rollback": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "RunQuery": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "streaming", + }, + "Write": { + "timeout_millis": 86400000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "streaming", + }, + "Listen": { + "timeout_millis": 86400000, + "retry_codes_name": "idempotent", + "retry_params_name": "streaming", + }, + "ListCollectionIds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/firestore/google/cloud/firestore_v1/gapic/transports/__init__.py b/firestore/google/cloud/firestore_v1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/firestore/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py b/firestore/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py new file mode 100644 index 000000000000..df72d7050f28 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/gapic/transports/firestore_grpc_transport.py @@ -0,0 +1,269 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import google.api_core.grpc_helpers + +from google.cloud.firestore_v1.proto import firestore_pb2_grpc + + +class FirestoreGrpcTransport(object): + """gRPC transport class providing stubs for + google.firestore.v1 Firestore API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ) + + def __init__( + self, channel=None, credentials=None, address="firestore.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel(address=address, credentials=credentials) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = {"firestore_stub": firestore_pb2_grpc.FirestoreStub(channel)} + + @classmethod + def create_channel(cls, address="firestore.googleapis.com:443", credentials=None): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def get_document(self): + """Return the gRPC stub for :meth:`FirestoreClient.get_document`. + + Gets a single document. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].GetDocument + + @property + def list_documents(self): + """Return the gRPC stub for :meth:`FirestoreClient.list_documents`. + + Lists documents. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].ListDocuments + + @property + def create_document(self): + """Return the gRPC stub for :meth:`FirestoreClient.create_document`. + + Creates a new document. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].CreateDocument + + @property + def update_document(self): + """Return the gRPC stub for :meth:`FirestoreClient.update_document`. + + Updates or inserts a document. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].UpdateDocument + + @property + def delete_document(self): + """Return the gRPC stub for :meth:`FirestoreClient.delete_document`. + + Deletes a document. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].DeleteDocument + + @property + def batch_get_documents(self): + """Return the gRPC stub for :meth:`FirestoreClient.batch_get_documents`. + + Gets multiple documents. + + Documents returned by this method are not guaranteed to be returned in the + same order that they were requested. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].BatchGetDocuments + + @property + def begin_transaction(self): + """Return the gRPC stub for :meth:`FirestoreClient.begin_transaction`. + + Starts a new transaction. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].BeginTransaction + + @property + def commit(self): + """Return the gRPC stub for :meth:`FirestoreClient.commit`. + + Commits a transaction, while optionally updating documents. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].Commit + + @property + def rollback(self): + """Return the gRPC stub for :meth:`FirestoreClient.rollback`. + + Rolls back a transaction. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].Rollback + + @property + def run_query(self): + """Return the gRPC stub for :meth:`FirestoreClient.run_query`. + + Runs a query. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].RunQuery + + @property + def write(self): + """Return the gRPC stub for :meth:`FirestoreClient.write`. + + Streams batches of document updates and deletes, in order. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].Write + + @property + def listen(self): + """Return the gRPC stub for :meth:`FirestoreClient.listen`. + + Listens to changes. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].Listen + + @property + def list_collection_ids(self): + """Return the gRPC stub for :meth:`FirestoreClient.list_collection_ids`. + + Lists all the collection IDs underneath a document. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["firestore_stub"].ListCollectionIds diff --git a/firestore/google/cloud/firestore_v1/proto/__init__.py b/firestore/google/cloud/firestore_v1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/firestore/google/cloud/firestore_v1/proto/common.proto b/firestore/google/cloud/firestore_v1/proto/common.proto new file mode 100644 index 000000000000..9a0ae7deae84 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/common.proto @@ -0,0 +1,85 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.firestore.v1; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.Firestore.V1"; +option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore"; +option java_multiple_files = true; +option java_outer_classname = "CommonProto"; +option java_package = "com.google.firestore.v1"; +option objc_class_prefix = "GCFS"; +option php_namespace = "Google\\Cloud\\Firestore\\V1"; + +// A set of field paths on a document. +// Used to restrict a get or update operation on a document to a subset of its +// fields. +// This is different from standard field masks, as this is always scoped to a +// [Document][google.firestore.v1.Document], and takes in account the dynamic +// nature of [Value][google.firestore.v1.Value]. +message DocumentMask { + // The list of field paths in the mask. See + // [Document.fields][google.firestore.v1.Document.fields] for a field path + // syntax reference. + repeated string field_paths = 1; +} + +// A precondition on a document, used for conditional operations. +message Precondition { + // The type of precondition. + oneof condition_type { + // When set to `true`, the target document must exist. + // When set to `false`, the target document must not exist. + bool exists = 1; + + // When set, the target document must exist and have been last updated at + // that time. + google.protobuf.Timestamp update_time = 2; + } +} + +// Options for creating a new transaction. +message TransactionOptions { + // Options for a transaction that can be used to read and write documents. + message ReadWrite { + // An optional transaction to retry. + bytes retry_transaction = 1; + } + + // Options for a transaction that can only be used to read documents. + message ReadOnly { + // The consistency mode for this transaction. If not set, defaults to strong + // consistency. + oneof consistency_selector { + // Reads documents at the given time. + // This may not be older than 60 seconds. + google.protobuf.Timestamp read_time = 2; + } + } + + // The mode of the transaction. + oneof mode { + // The transaction can only be used for read operations. + ReadOnly read_only = 2; + + // The transaction can be used for both read and write operations. + ReadWrite read_write = 3; + } +} diff --git a/firestore/google/cloud/firestore_v1/proto/common_pb2.py b/firestore/google/cloud/firestore_v1/proto/common_pb2.py new file mode 100644 index 000000000000..b94341f7195c --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/common_pb2.py @@ -0,0 +1,450 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/firestore_v1/proto/common.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/firestore_v1/proto/common.proto", + package="google.firestore.v1", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.firestore.v1B\013CommonProtoP\001Z fields = 2; + + // Output only. The time at which the document was created. + // + // This value increases monotonically when a document is deleted then + // recreated. It can also be compared to values from other documents and + // the `read_time` of a query. + google.protobuf.Timestamp create_time = 3; + + // Output only. The time at which the document was last changed. + // + // This value is initially set to the `create_time` then increases + // monotonically with each change to the document. It can also be + // compared to values from other documents and the `read_time` of a query. + google.protobuf.Timestamp update_time = 4; +} + +// A message that can hold any of the supported value types. +message Value { + // Must have a value set. + oneof value_type { + // A null value. + google.protobuf.NullValue null_value = 11; + + // A boolean value. + bool boolean_value = 1; + + // An integer value. + int64 integer_value = 2; + + // A double value. + double double_value = 3; + + // A timestamp value. + // + // Precise only to microseconds. When stored, any additional precision is + // rounded down. + google.protobuf.Timestamp timestamp_value = 10; + + // A string value. + // + // The string, represented as UTF-8, must not exceed 1 MiB - 89 bytes. + // Only the first 1,500 bytes of the UTF-8 representation are considered by + // queries. + string string_value = 17; + + // A bytes value. + // + // Must not exceed 1 MiB - 89 bytes. + // Only the first 1,500 bytes are considered by queries. + bytes bytes_value = 18; + + // A reference to a document. For example: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + string reference_value = 5; + + // A geo point value representing a point on the surface of Earth. + google.type.LatLng geo_point_value = 8; + + // An array value. + // + // Cannot directly contain another array value, though can contain an + // map which contains another array. + ArrayValue array_value = 9; + + // A map value. + MapValue map_value = 6; + } +} + +// An array value. +message ArrayValue { + // Values in the array. + repeated Value values = 1; +} + +// A map value. +message MapValue { + // The map's fields. + // + // The map keys represent field names. Field names matching the regular + // expression `__.*__` are reserved. Reserved field names are forbidden except + // in certain documented contexts. The map keys, represented as UTF-8, must + // not exceed 1,500 bytes and cannot be empty. 
+ map fields = 1; +} diff --git a/firestore/google/cloud/firestore_v1/proto/document_pb2.py b/firestore/google/cloud/firestore_v1/proto/document_pb2.py new file mode 100644 index 000000000000..164880a7ecae --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/document_pb2.py @@ -0,0 +1,797 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/firestore_v1/proto/document.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/firestore_v1/proto/document.proto", + package="google.firestore.v1", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.firestore.v1B\rDocumentProtoP\001Z labels = 5; +} + +// The response for [Firestore.Write][google.firestore.v1.Firestore.Write]. +message WriteResponse { + // The ID of the stream. + // Only set on the first message, when a new stream was created. + string stream_id = 1; + + // A token that represents the position of this response in the stream. + // This can be used by a client to resume the stream at this point. + // + // This field is always set. + bytes stream_token = 2; + + // The result of applying the writes. + // + // This i-th write result corresponds to the i-th write in the + // request. + repeated WriteResult write_results = 3; + + // The time at which the commit occurred. + google.protobuf.Timestamp commit_time = 4; +} + +// A request for [Firestore.Listen][google.firestore.v1.Firestore.Listen] +message ListenRequest { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + string database = 1; + + // The supported target changes. + oneof target_change { + // A target to add to this stream. + Target add_target = 2; + + // The ID of a target to remove from this stream. + int32 remove_target = 3; + } + + // Labels associated with this target change. + map labels = 4; +} + +// The response for [Firestore.Listen][google.firestore.v1.Firestore.Listen]. +message ListenResponse { + // The supported responses. + oneof response_type { + // Targets have changed. + TargetChange target_change = 2; + + // A [Document][google.firestore.v1.Document] has changed. + DocumentChange document_change = 3; + + // A [Document][google.firestore.v1.Document] has been deleted. + DocumentDelete document_delete = 4; + + // A [Document][google.firestore.v1.Document] has been removed from a target + // (because it is no longer relevant to that target). + DocumentRemove document_remove = 6; + + // A filter to apply to the set of documents previously returned for the + // given target. + // + // Returned when documents may have been removed from the given target, but + // the exact documents are unknown. + ExistenceFilter filter = 5; + } +} + +// A specification of a set of documents to listen to. 
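To make the relationship between ``ListenRequest`` above and the ``Target`` message below concrete, here is a sketch of building a documents target in Python. It assumes the usual ``firestore_v1.types`` re-export module generated alongside these protos; the project, database, and document paths are placeholders:

    from google.cloud import firestore_v1

    database = "projects/[PROJECT]/databases/(default)"
    target = firestore_v1.types.Target(
        documents=firestore_v1.types.Target.DocumentsTarget(
            documents=[database + "/documents/users/alice"],
        ),
        target_id=1,  # client-provided ID; omit to let the server assign one
    )
    request = firestore_v1.types.ListenRequest(database=database, add_target=target)
    # An iterator of such requests is what FirestoreClient.listen() consumes.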
+message Target { + // A target specified by a set of documents names. + message DocumentsTarget { + // The names of the documents to retrieve. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // The request will fail if any of the document is not a child resource of + // the given `database`. Duplicate names will be elided. + repeated string documents = 2; + } + + // A target specified by a query. + message QueryTarget { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + string parent = 1; + + // The query to run. + oneof query_type { + // A structured query. + StructuredQuery structured_query = 2; + } + } + + // The type of target to listen to. + oneof target_type { + // A target specified by a query. + QueryTarget query = 2; + + // A target specified by a set of document names. + DocumentsTarget documents = 3; + } + + // When to start listening. + // + // If not specified, all matching Documents are returned before any + // subsequent changes. + oneof resume_type { + // A resume token from a prior + // [TargetChange][google.firestore.v1.TargetChange] for an identical target. + // + // Using a resume token with a different target is unsupported and may fail. + bytes resume_token = 4; + + // Start listening after a specific `read_time`. + // + // The client must know the state of matching documents at this time. + google.protobuf.Timestamp read_time = 11; + } + + // A client provided target ID. + // + // If not set, the server will assign an ID for the target. + // + // Used for resuming a target without changing IDs. The IDs can either be + // client-assigned or be server-assigned in a previous stream. All targets + // with client provided IDs must be added before adding a target that needs + // a server-assigned id. + int32 target_id = 5; + + // If the target should be removed once it is current and consistent. + bool once = 6; +} + +// Targets being watched have changed. +message TargetChange { + // The type of change. + enum TargetChangeType { + // No change has occurred. Used only to send an updated `resume_token`. + NO_CHANGE = 0; + + // The targets have been added. + ADD = 1; + + // The targets have been removed. + REMOVE = 2; + + // The targets reflect all changes committed before the targets were added + // to the stream. + // + // This will be sent after or with a `read_time` that is greater than or + // equal to the time at which the targets were added. + // + // Listeners can wait for this change if read-after-write semantics + // are desired. + CURRENT = 3; + + // The targets have been reset, and a new initial state for the targets + // will be returned in subsequent changes. + // + // After the initial state is complete, `CURRENT` will be returned even + // if the target was previously indicated to be `CURRENT`. + RESET = 4; + } + + // The type of change that occurred. + TargetChangeType target_change_type = 1; + + // The target IDs of targets that have changed. + // + // If empty, the change applies to all targets. + // + // For `target_change_type=ADD`, the order of the target IDs matches the order + // of the requests to add the targets. 
This allows clients to unambiguously + // associate server-assigned target IDs with added targets. + // + // For other states, the order of the target IDs is not defined. + repeated int32 target_ids = 2; + + // The error that resulted in this change, if applicable. + google.rpc.Status cause = 3; + + // A token that can be used to resume the stream for the given `target_ids`, + // or all targets if `target_ids` is empty. + // + // Not set on every target change. + bytes resume_token = 4; + + // The consistent `read_time` for the given `target_ids` (omitted when the + // target_ids are not at a consistent snapshot). + // + // The stream is guaranteed to send a `read_time` with `target_ids` empty + // whenever the entire stream reaches a new consistent snapshot. ADD, + // CURRENT, and RESET messages are guaranteed to (eventually) result in a + // new consistent snapshot (while NO_CHANGE and REMOVE messages are not). + // + // For a given stream, `read_time` is guaranteed to be monotonically + // increasing. + google.protobuf.Timestamp read_time = 6; +} + +// The request for +// [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. +message ListCollectionIdsRequest { + // The parent document. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + string parent = 1; + + // The maximum number of results to return. + int32 page_size = 2; + + // A page token. Must be a value from + // [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. + string page_token = 3; +} + +// The response from +// [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. +message ListCollectionIdsResponse { + // The collection ids. + repeated string collection_ids = 1; + + // A page token that may be used to continue the list. + string next_page_token = 2; +} diff --git a/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py b/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py new file mode 100644 index 000000000000..0e1d37e3dab2 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py @@ -0,0 +1,3783 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/firestore_v1/proto/firestore.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.firestore_v1.proto import ( + common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2, +) +from google.cloud.firestore_v1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2, +) +from google.cloud.firestore_v1.proto import ( + query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2, +) +from google.cloud.firestore_v1.proto import ( + write_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_write__pb2, +) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/firestore_v1/proto/firestore.proto", + package="google.firestore.v1", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.firestore.v1B\016FirestoreProtoP\001Z\n\x06labels\x18\x04 \x03(\x0b\x32..google.firestore.v1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xd5\x02\n\x0eListenResponse\x12:\n\rtarget_change\x18\x02 \x01(\x0b\x32!.google.firestore.v1.TargetChangeH\x00\x12>\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32#.google.firestore.v1.DocumentChangeH\x00\x12>\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32#.google.firestore.v1.DocumentDeleteH\x00\x12>\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32#.google.firestore.v1.DocumentRemoveH\x00\x12\x36\n\x06\x66ilter\x18\x05 \x01(\x0b\x32$.google.firestore.v1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xa1\x03\n\x06Target\x12\x38\n\x05query\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.Target.QueryTargetH\x00\x12@\n\tdocuments\x18\x03 \x01(\x0b\x32+.google.firestore.v1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1am\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x10structured_query\x18\x02 \x01(\x0b\x32$.google.firestore.v1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaa\x02\n\x0cTargetChange\x12N\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x32.google.firestore.v1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x84\x12\n\tFirestore\x12\x8f\x01\n\x0bGetDocument\x12\'.google.firestore.v1.GetDocumentRequest\x1a\x1d.google.firestore.v1.Document"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb2\x01\n\rListDocuments\x12).google.firestore.v1.ListDocumentsRequest\x1a*.google.firestore.v1.ListDocumentsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xaf\x01\n\x0e\x43reateDocument\x12*.google.firestore.v1.CreateDocumentRequest\x1a\x1d.google.firestore.v1.Document"R\x82\xd3\xe4\x93\x02L"@/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xa8\x01\n\x0eUpdateDocument\x12*.google.firestore.v1.UpdateDocumentRequest\x1a\x1d.google.firestore.v1.Document"K\x82\xd3\xe4\x93\x02\x45\x32\x39/v1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x8e\x01\n\x0e\x44\x65leteDocument\x12*.google.firestore.v1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb9\x01\n\x11\x42\x61tchGetDocuments\x12-.google.firestore.v1.BatchGetDocumentsRequest\x1a..google.firestore.v1.BatchGetDocumentsResponse"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xbc\x01\n\x10\x42\x65ginTransaction\x12,.google.firestore.v1.BeginTransactionRequest\x1a-.google.firestore.v1.BeginTransactionResponse"K\x82\xd3\xe4\x93\x02\x45"@/v1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\x94\x01\n\x06\x43ommit\x12".google.firestore.v1.CommitRequest\x1a#.google.firestore.v1.CommitResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x8d\x01\n\x08Rollback\x12$.google.firestore.v1.RollbackRequest\x1a\x16.google.protobuf.Empty"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xdf\x01\n\x08RunQuery\x12$.google.firestore.v1.RunQueryRequest\x1a%.google.firestore.v1.RunQueryResponse"\x83\x01\x82\xd3\xe4\x93\x02}"6/v1/{parent=projects/*/databases/*/documents}:runQuery:\x01*Z@";/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\x94\x01\n\x05Write\x12!.google.firestore.v1.WriteRequest\x1a".google.firestore.v1.WriteResponse"@\x82\xd3\xe4\x93\x02:"5/v1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\x98\x01\n\x06Listen\x12".google.firestore.v1.ListenRequest\x1a#.google.firestore.v1.ListenResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x8b\x02\n\x11ListCollectionIds\x12-.google.firestore.v1.ListCollectionIdsRequest\x1a..google.firestore.v1.ListCollectionIdsResponse"\x96\x01\x82\xd3\xe4\x93\x02\x8f\x01"?/v1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZI"D/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*B\xa8\x01\n\x17\x63om.google.firestore.v1B\x0e\x46irestoreProtoP\x01Z 1` becomes + // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__` + repeated Order order_by = 4; + + // A starting point for the query results. + Cursor start_at = 7; + + // A end point for the query results. + Cursor end_at = 8; + + // The number of results to skip. + // + // Applies before limit, but after all other constraints. Must be >= 0 if + // specified. 
+ int32 offset = 6; + + // The maximum number of results to return. + // + // Applies after all other constraints. + // Must be >= 0 if specified. + google.protobuf.Int32Value limit = 5; +} + +// A position in a query result set. +message Cursor { + // The values that represent a position, in the order they appear in + // the order by clause of a query. + // + // Can contain fewer values than specified in the order by clause. + repeated Value values = 1; + + // If the position is just before or just after the given values, relative + // to the sort order defined by the query. + bool before = 2; +} diff --git a/firestore/google/cloud/firestore_v1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1/proto/query_pb2.py new file mode 100644 index 000000000000..c0408003ae64 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/query_pb2.py @@ -0,0 +1,1186 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/firestore_v1/proto/query.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.firestore_v1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2, +) +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/firestore_v1/proto/query.proto", + package="google.firestore.v1", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.firestore.v1B\nQueryProtoP\001Z 1`` + becomes ``SELECT * FROM Foo WHERE A > 1 ORDER BY A, + __name__`` + start_at: + A starting point for the query results. + end_at: + A end point for the query results. + offset: + The number of results to skip. Applies before limit, but + after all other constraints. Must be >= 0 if specified. + limit: + The maximum number of results to return. Applies after all + other constraints. Must be >= 0 if specified. + """, + # @@protoc_insertion_point(class_scope:google.firestore.v1.StructuredQuery) + ), +) +_sym_db.RegisterMessage(StructuredQuery) +_sym_db.RegisterMessage(StructuredQuery.CollectionSelector) +_sym_db.RegisterMessage(StructuredQuery.Filter) +_sym_db.RegisterMessage(StructuredQuery.CompositeFilter) +_sym_db.RegisterMessage(StructuredQuery.FieldFilter) +_sym_db.RegisterMessage(StructuredQuery.UnaryFilter) +_sym_db.RegisterMessage(StructuredQuery.Order) +_sym_db.RegisterMessage(StructuredQuery.FieldReference) +_sym_db.RegisterMessage(StructuredQuery.Projection) + +Cursor = _reflection.GeneratedProtocolMessageType( + "Cursor", + (_message.Message,), + dict( + DESCRIPTOR=_CURSOR, + __module__="google.cloud.firestore_v1.proto.query_pb2", + __doc__="""A position in a query result set. + + + Attributes: + values: + The values that represent a position, in the order they appear + in the order by clause of a query. Can contain fewer values + than specified in the order by clause. + before: + If the position is just before or just after the given values, + relative to the sort order defined by the query. 
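As an illustration of how ``order_by``, cursors, and ``limit`` fit together, a sketch of building a ``StructuredQuery`` in Python follows. It assumes the usual ``firestore_v1.types`` re-export module; the field name ``born`` and the cursor value are placeholders:

    from google.protobuf import wrappers_pb2
    from google.cloud import firestore_v1
    from google.cloud.firestore_v1.gapic import enums

    query = firestore_v1.types.StructuredQuery(
        order_by=[
            firestore_v1.types.StructuredQuery.Order(
                field=firestore_v1.types.StructuredQuery.FieldReference(field_path="born"),
                direction=enums.StructuredQuery.Direction.ASCENDING,
            )
        ],
        start_at=firestore_v1.types.Cursor(
            values=[firestore_v1.types.Value(integer_value=1815)],
            before=True,  # position just before the first document with born == 1815
        ),
        limit=wrappers_pb2.Int32Value(value=10),
    )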
+ """, + # @@protoc_insertion_point(class_scope:google.firestore.v1.Cursor) + ), +) +_sym_db.RegisterMessage(Cursor) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1/proto/query_pb2_grpc.py b/firestore/google/cloud/firestore_v1/proto/query_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/query_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/firestore/google/cloud/firestore_v1/proto/write.proto b/firestore/google/cloud/firestore_v1/proto/write.proto new file mode 100644 index 000000000000..70bb74d2344a --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/write.proto @@ -0,0 +1,263 @@ +// Copyright 2018 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.firestore.v1; + +import "google/api/annotations.proto"; +import "google/firestore/v1/common.proto"; +import "google/firestore/v1/document.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Google.Cloud.Firestore.V1"; +option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore"; +option java_multiple_files = true; +option java_outer_classname = "WriteProto"; +option java_package = "com.google.firestore.v1"; +option objc_class_prefix = "GCFS"; +option php_namespace = "Google\\Cloud\\Firestore\\V1"; + +// A write on a document. +message Write { + // The operation to execute. + oneof operation { + // A document to write. + Document update = 1; + + // A document name to delete. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + string delete = 2; + + // Applies a tranformation to a document. + // At most one `transform` per document is allowed in a given request. + // An `update` cannot follow a `transform` on the same document in a given + // request. + DocumentTransform transform = 6; + } + + // The fields to update in this write. + // + // This field can be set only when the operation is `update`. + // If the mask is not set for an `update` and the document exists, any + // existing data will be overwritten. + // If the mask is set and the document on the server has fields not covered by + // the mask, they are left unchanged. + // Fields referenced in the mask, but not present in the input document, are + // deleted from the document on the server. + // The field paths in this mask must not contain a reserved field name. + DocumentMask update_mask = 3; + + // An optional precondition on the document. + // + // The write will fail if this is set and not met by the target document. + Precondition current_document = 4; +} + +// A transformation of a document. +message DocumentTransform { + // A transformation of a field of the document. + message FieldTransform { + // A value that is calculated by the server. + enum ServerValue { + // Unspecified. 
This value must not be used. + SERVER_VALUE_UNSPECIFIED = 0; + + // The time at which the server processed the request, with millisecond + // precision. + REQUEST_TIME = 1; + } + + // The path of the field. See + // [Document.fields][google.firestore.v1.Document.fields] for the field path + // syntax reference. + string field_path = 1; + + // The transformation to apply on the field. + oneof transform_type { + // Sets the field to the given server value. + ServerValue set_to_server_value = 2; + + // Adds the given value to the field's current value. + // + // This must be an integer or a double value. + // If the field is not an integer or double, or if the field does not yet + // exist, the transformation will set the field to the given value. + // If either of the given value or the current field value are doubles, + // both values will be interpreted as doubles. Double arithmetic and + // representation of double values follow IEEE 754 semantics. + // If there is positive/negative integer overflow, the field is resolved + // to the largest magnitude positive/negative integer. + Value increment = 3; + + // Sets the field to the maximum of its current value and the given value. + // + // This must be an integer or a double value. + // If the field is not an integer or double, or if the field does not yet + // exist, the transformation will set the field to the given value. + // If a maximum operation is applied where the field and the input value + // are of mixed types (that is - one is an integer and one is a double) + // the field takes on the type of the larger operand. If the operands are + // equivalent (e.g. 3 and 3.0), the field does not change. + // 0, 0.0, and -0.0 are all zero. The maximum of a zero stored value and + // zero input value is always the stored value. + // The maximum of any numeric value x and NaN is NaN. + Value maximum = 4; + + // Sets the field to the minimum of its current value and the given value. + // + // This must be an integer or a double value. + // If the field is not an integer or double, or if the field does not yet + // exist, the transformation will set the field to the input value. + // If a minimum operation is applied where the field and the input value + // are of mixed types (that is - one is an integer and one is a double) + // the field takes on the type of the smaller operand. If the operands are + // equivalent (e.g. 3 and 3.0), the field does not change. + // 0, 0.0, and -0.0 are all zero. The minimum of a zero stored value and + // zero input value is always the stored value. + // The minimum of any numeric value x and NaN is NaN. + Value minimum = 5; + + // Append the given elements in order if they are not already present in + // the current field value. + // If the field is not an array, or if the field does not yet exist, it is + // first set to the empty array. + // + // Equivalent numbers of different types (e.g. 3L and 3.0) are + // considered equal when checking if a value is missing. + // NaN is equal to NaN, and Null is equal to Null. + // If the input contains multiple equivalent values, only the first will + // be considered. + // + // The corresponding transform_result will be the null value. + ArrayValue append_missing_elements = 6; + + // Remove all of the given elements from the array in the field. + // If the field is not an array, or if the field does not yet exist, it is + // set to the empty array. + // + // Equivalent numbers of the different types (e.g. 
3L and 3.0) are + // considered equal when deciding whether an element should be removed. + // NaN is equal to NaN, and Null is equal to Null. + // This will remove all equivalent values if there are duplicates. + // + // The corresponding transform_result will be the null value. + ArrayValue remove_all_from_array = 7; + } + } + + // The name of the document to transform. + string document = 1; + + // The list of transformations to apply to the fields of the document, in + // order. + // This must not be empty. + repeated FieldTransform field_transforms = 2; +} + +// The result of applying a write. +message WriteResult { + // The last update time of the document after applying the write. Not set + // after a `delete`. + // + // If the write did not actually change the document, this will be the + // previous update_time. + google.protobuf.Timestamp update_time = 1; + + // The results of applying each + // [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], + // in the same order. + repeated Value transform_results = 2; +} + +// A [Document][google.firestore.v1.Document] has changed. +// +// May be the result of multiple [writes][google.firestore.v1.Write], including +// deletes, that ultimately resulted in a new value for the +// [Document][google.firestore.v1.Document]. +// +// Multiple [DocumentChange][google.firestore.v1.DocumentChange] messages may be +// returned for the same logical change, if multiple targets are affected. +message DocumentChange { + // The new state of the [Document][google.firestore.v1.Document]. + // + // If `mask` is set, contains only fields that were updated or added. + Document document = 1; + + // A set of target IDs of targets that match this document. + repeated int32 target_ids = 5; + + // A set of target IDs for targets that no longer match this document. + repeated int32 removed_target_ids = 6; +} + +// A [Document][google.firestore.v1.Document] has been deleted. +// +// May be the result of multiple [writes][google.firestore.v1.Write], including +// updates, the last of which deleted the +// [Document][google.firestore.v1.Document]. +// +// Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] messages may be +// returned for the same logical delete, if multiple targets are affected. +message DocumentDelete { + // The resource name of the [Document][google.firestore.v1.Document] that was + // deleted. + string document = 1; + + // A set of target IDs for targets that previously matched this entity. + repeated int32 removed_target_ids = 6; + + // The read timestamp at which the delete was observed. + // + // Greater or equal to the `commit_time` of the delete. + google.protobuf.Timestamp read_time = 4; +} + +// A [Document][google.firestore.v1.Document] has been removed from the view of +// the targets. +// +// Sent if the document is no longer relevant to a target and is out of view. +// Can be sent instead of a DocumentDelete or a DocumentChange if the server +// can not send the new value of the document. +// +// Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] messages may be +// returned for the same logical write or delete, if multiple targets are +// affected. +message DocumentRemove { + // The resource name of the [Document][google.firestore.v1.Document] that has + // gone out of view. + string document = 1; + + // A set of target IDs for targets that previously matched this document. + repeated int32 removed_target_ids = 2; + + // The read timestamp at which the remove was observed. 
+ // + // Greater or equal to the `commit_time` of the change/delete/remove. + google.protobuf.Timestamp read_time = 4; +} + +// A digest of all the documents that match a given target. +message ExistenceFilter { + // The target ID to which this filter applies. + int32 target_id = 1; + + // The total count of documents that match + // [target_id][google.firestore.v1.ExistenceFilter.target_id]. + // + // If different from the count of documents in the client that match, the + // client must manually determine which documents no longer match the target. + int32 count = 2; +} diff --git a/firestore/google/cloud/firestore_v1/proto/write_pb2.py b/firestore/google/cloud/firestore_v1/proto/write_pb2.py new file mode 100644 index 000000000000..980c5e3fa6a4 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/write_pb2.py @@ -0,0 +1,1144 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/firestore_v1/proto/write.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.firestore_v1.proto import ( + common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2, +) +from google.cloud.firestore_v1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/firestore_v1/proto/write.proto", + package="google.firestore.v1", + syntax="proto3", + serialized_options=_b( + "\n\027com.google.firestore.v1B\nWriteProtoP\001Z Date: Tue, 5 Mar 2019 16:06:02 -0500 Subject: [PATCH 2/7] Use explicitly-versioned module in examples. --- firestore/google/cloud/firestore_v1beta1/collection.py | 4 ++-- firestore/google/cloud/firestore_v1beta1/document.py | 4 ++-- firestore/google/cloud/firestore_v1beta1/query.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/firestore/google/cloud/firestore_v1beta1/collection.py b/firestore/google/cloud/firestore_v1beta1/collection.py index da76429e9622..9afd96866265 100644 --- a/firestore/google/cloud/firestore_v1beta1/collection.py +++ b/firestore/google/cloud/firestore_v1beta1/collection.py @@ -432,9 +432,9 @@ def on_snapshot(self, callback): to run when a change occurs. 
Example: - from google.cloud import firestore + from google.cloud import firestore_v1beta1 - db = firestore.Client() + db = firestore_v1beta1.Client() collection_ref = db.collection(u'users') def on_snapshot(collection_snapshot): diff --git a/firestore/google/cloud/firestore_v1beta1/document.py b/firestore/google/cloud/firestore_v1beta1/document.py index 292b70c7851b..17238af0d3ac 100644 --- a/firestore/google/cloud/firestore_v1beta1/document.py +++ b/firestore/google/cloud/firestore_v1beta1/document.py @@ -501,9 +501,9 @@ def on_snapshot(self, callback): when a change occurs Example: - from google.cloud import firestore + from google.cloud import firestore_v1beta1 - db = firestore.Client() + db = firestore_v1beta1.Client() collection_ref = db.collection(u'users') def on_snapshot(document_snapshot): diff --git a/firestore/google/cloud/firestore_v1beta1/query.py b/firestore/google/cloud/firestore_v1beta1/query.py index 16d92bebcaef..1191f75af4aa 100644 --- a/firestore/google/cloud/firestore_v1beta1/query.py +++ b/firestore/google/cloud/firestore_v1beta1/query.py @@ -756,9 +756,9 @@ def on_snapshot(self, callback): a change occurs. Example: - from google.cloud import firestore + from google.cloud import firestore_v1beta1 - db = firestore.Client() + db = firestore_v1beta1.Client() query_ref = db.collection(u'users').where("user", "==", u'Ada') def on_snapshot(docs, changes, read_time): From 6264ece12336ed8dd575bc354b19fad9fc3bc946 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 6 Mar 2019 15:36:55 -0500 Subject: [PATCH 3/7] Use versioned file for test protos in 'v1beta1' test runner. --- firestore/Makefile_v1beta1 | 37 ++ .../{test_pb2.py => test_v1beta1_pb2.py} | 362 +++++++++--------- .../tests/unit/v1beta1/test_cross_language.py | 4 +- 3 files changed, 220 insertions(+), 183 deletions(-) create mode 100644 firestore/Makefile_v1beta1 rename firestore/google/cloud/firestore_v1beta1/proto/{test_pb2.py => test_v1beta1_pb2.py} (79%) diff --git a/firestore/Makefile_v1beta1 b/firestore/Makefile_v1beta1 new file mode 100644 index 000000000000..69cf87f41a36 --- /dev/null +++ b/firestore/Makefile_v1beta1 @@ -0,0 +1,37 @@ +# This makefile builds the protos needed for cross-language Firestore tests. + +# Assume protoc is on the path. The proto compiler must be one that +# supports proto3 syntax. +PROTOC = protoc + +# Dependent repos. +REPO_DIR = $(HOME)/git-repos +PROTOBUF_REPO = $(REPO_DIR)/protobuf +GOOGLEAPIS_REPO = $(REPO_DIR)/googleapis +TESTS_REPO = $(REPO_DIR)/gcp/google-cloud-common + +TMPDIR = /tmp/python-fs-proto +TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1beta1/proto + +.PHONY: sync-protos gen-protos + +gen-protos: sync-protos tweak-protos + # TODO(jba): Put the generated proto somewhere more suitable. 
+ $(PROTOC) --python_out=google/cloud/firestore_v1beta1/proto \ + -I $(TMPDIR) \ + -I $(PROTOBUF_REPO)/src \ + -I $(GOOGLEAPIS_REPO) \ + $(TMPDIR)/test_v1beta1.proto + +tweak-protos: + mkdir -p $(TMPDIR_FS) + cp $(GOOGLEAPIS_REPO)/google/firestore/v1beta1/*.proto $(TMPDIR_FS) + sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR_FS)/*.proto + cp $(TESTS_REPO)/testing/firestore/proto/test_v1beta1.proto $(TMPDIR) + sed -i -e 's@package tests@package tests.v1beta1@' $(TMPDIR)/test_v1beta1.proto + sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR)/test_v1beta1.proto + +sync-protos: + cd $(PROTOBUF_REPO); git pull + cd $(GOOGLEAPIS_REPO); git pull + #cd $(TESTS_REPO); git pull diff --git a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py similarity index 79% rename from firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py rename to firestore/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py index bc025b0f3681..18dc58706837 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/test_v1beta1_pb2.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: test.proto +# source: test_v1beta1.proto import sys @@ -31,11 +31,11 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="test.proto", - package="tests", + name="test_v1beta1.proto", + package="tests.v1beta1", syntax="proto3", serialized_pb=_b( - '\n\ntest.proto\x12\x05tests\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\'\n\tTestSuite\x12\x1a\n\x05tests\x18\x01 \x03(\x0b\x32\x0b.tests.Test"\xc8\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x1d\n\x03get\x18\x02 \x01(\x0b\x32\x0e.tests.GetTestH\x00\x12#\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x11.tests.CreateTestH\x00\x12\x1d\n\x03set\x18\x04 \x01(\x0b\x32\x0e.tests.SetTestH\x00\x12#\n\x06update\x18\x05 \x01(\x0b\x32\x11.tests.UpdateTestH\x00\x12.\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x16.tests.UpdatePathsTestH\x00\x12#\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x11.tests.DeleteTestH\x00\x12!\n\x05query\x18\x08 \x01(\x0b\x32\x10.tests.QueryTestH\x00\x12#\n\x06listen\x18\t \x01(\x0b\x32\x11.tests.ListenTestH\x00\x42\x06\n\x04test"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa0\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12 \n\x06option\x18\x02 \x01(\x0b\x32\x10.tests.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 
\x01(\x08"\xed\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12%\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x10.tests.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08":\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12 \n\x06\x66ields\x18\x02 \x03(\x0b\x32\x10.tests.FieldPath"\x8a\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x1e\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\r.tests.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa8\x02\n\x06\x43lause\x12\x1f\n\x06select\x18\x01 \x01(\x0b\x32\r.tests.SelectH\x00\x12\x1d\n\x05where\x18\x02 \x01(\x0b\x32\x0c.tests.WhereH\x00\x12"\n\x08order_by\x18\x03 \x01(\x0b\x32\x0e.tests.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12!\n\x08start_at\x18\x06 \x01(\x0b\x32\r.tests.CursorH\x00\x12$\n\x0bstart_after\x18\x07 \x01(\x0b\x32\r.tests.CursorH\x00\x12\x1f\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\r.tests.CursorH\x00\x12#\n\nend_before\x18\t \x01(\x0b\x32\r.tests.CursorH\x00\x42\x08\n\x06\x63lause"*\n\x06Select\x12 \n\x06\x66ields\x18\x01 \x03(\x0b\x32\x10.tests.FieldPath"G\n\x05Where\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"<\n\x07OrderBy\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"G\n\x06\x43ursor\x12(\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x12.tests.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x7f\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12"\n\tsnapshots\x18\x02 \x03(\x0b\x32\x0f.tests.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x8e\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12!\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x10.tests.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xcb\x01\n\tDocChange\x12#\n\x04kind\x18\x01 \x01(\x0e\x32\x15.tests.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3' + '\n\x12test_v1beta1.proto\x12\rtests.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"/\n\tTestSuite\x12"\n\x05tests\x18\x01 
\x03(\x0b\x32\x13.tests.v1beta1.Test"\x88\x03\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12%\n\x03get\x18\x02 \x01(\x0b\x32\x16.tests.v1beta1.GetTestH\x00\x12+\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x19.tests.v1beta1.CreateTestH\x00\x12%\n\x03set\x18\x04 \x01(\x0b\x32\x16.tests.v1beta1.SetTestH\x00\x12+\n\x06update\x18\x05 \x01(\x0b\x32\x19.tests.v1beta1.UpdateTestH\x00\x12\x36\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x1e.tests.v1beta1.UpdatePathsTestH\x00\x12+\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x19.tests.v1beta1.DeleteTestH\x00\x12)\n\x05query\x18\x08 \x01(\x0b\x32\x18.tests.v1beta1.QueryTestH\x00\x12+\n\x06listen\x18\t \x01(\x0b\x32\x19.tests.v1beta1.ListenTestH\x00\x42\x06\n\x04test"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xa8\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12(\n\x06option\x18\x02 \x01(\x0b\x32\x18.tests.v1beta1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xf5\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12-\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"B\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12(\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"\x92\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12&\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x15.tests.v1beta1.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xe0\x02\n\x06\x43lause\x12\'\n\x06select\x18\x01 \x01(\x0b\x32\x15.tests.v1beta1.SelectH\x00\x12%\n\x05where\x18\x02 \x01(\x0b\x32\x14.tests.v1beta1.WhereH\x00\x12*\n\x08order_by\x18\x03 \x01(\x0b\x32\x16.tests.v1beta1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12)\n\x08start_at\x18\x06 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12,\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12\'\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x12+\n\nend_before\x18\t \x01(\x0b\x32\x15.tests.v1beta1.CursorH\x00\x42\x08\n\x06\x63lause"2\n\x06Select\x12(\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x18.tests.v1beta1.FieldPath"O\n\x05Where\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\n\n\x02op\x18\x02 
\x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"D\n\x07OrderBy\x12&\n\x04path\x18\x01 \x01(\x0b\x32\x18.tests.v1beta1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"O\n\x06\x43ursor\x12\x30\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x1a.tests.v1beta1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x87\x01\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12*\n\tsnapshots\x18\x02 \x03(\x0b\x32\x17.tests.v1beta1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x96\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12)\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x18.tests.v1beta1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd3\x01\n\tDocChange\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.tests.v1beta1.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3' ), dependencies=[ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, @@ -49,7 +49,7 @@ _DOCCHANGE_KIND = _descriptor.EnumDescriptor( name="Kind", - full_name="tests.DocChange.Kind", + full_name="tests.v1beta1.DocChange.Kind", filename=None, file=DESCRIPTOR, values=[ @@ -68,22 +68,22 @@ ], containing_type=None, options=None, - serialized_start=2874, - serialized_end=2940, + serialized_start=3107, + serialized_end=3173, ) _sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND) _TESTSUITE = _descriptor.Descriptor( name="TestSuite", - full_name="tests.TestSuite", + full_name="tests.v1beta1.TestSuite", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="tests", - full_name="tests.TestSuite.tests", + full_name="tests.v1beta1.TestSuite.tests", index=0, number=1, type=11, @@ -108,21 +108,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=262, - serialized_end=301, + serialized_start=278, + serialized_end=325, ) _TEST = _descriptor.Descriptor( name="Test", - full_name="tests.Test", + full_name="tests.v1beta1.Test", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="description", - full_name="tests.Test.description", + full_name="tests.v1beta1.Test.description", index=0, number=1, type=9, @@ -140,7 +140,7 @@ ), _descriptor.FieldDescriptor( name="get", - full_name="tests.Test.get", + full_name="tests.v1beta1.Test.get", index=1, number=2, type=11, @@ -158,7 +158,7 @@ ), _descriptor.FieldDescriptor( name="create", - full_name="tests.Test.create", + full_name="tests.v1beta1.Test.create", index=2, number=3, type=11, @@ -176,7 +176,7 @@ ), _descriptor.FieldDescriptor( name="set", - full_name="tests.Test.set", + full_name="tests.v1beta1.Test.set", index=3, number=4, type=11, @@ -194,7 +194,7 @@ ), _descriptor.FieldDescriptor( name="update", - full_name="tests.Test.update", + full_name="tests.v1beta1.Test.update", index=4, number=5, type=11, @@ -212,7 +212,7 @@ ), _descriptor.FieldDescriptor( name="update_paths", - full_name="tests.Test.update_paths", + 
full_name="tests.v1beta1.Test.update_paths", index=5, number=6, type=11, @@ -230,7 +230,7 @@ ), _descriptor.FieldDescriptor( name="delete", - full_name="tests.Test.delete", + full_name="tests.v1beta1.Test.delete", index=6, number=7, type=11, @@ -248,7 +248,7 @@ ), _descriptor.FieldDescriptor( name="query", - full_name="tests.Test.query", + full_name="tests.v1beta1.Test.query", index=7, number=8, type=11, @@ -266,7 +266,7 @@ ), _descriptor.FieldDescriptor( name="listen", - full_name="tests.Test.listen", + full_name="tests.v1beta1.Test.listen", index=8, number=9, type=11, @@ -293,27 +293,27 @@ oneofs=[ _descriptor.OneofDescriptor( name="test", - full_name="tests.Test.test", + full_name="tests.v1beta1.Test.test", index=0, containing_type=None, fields=[], ) ], - serialized_start=304, - serialized_end=632, + serialized_start=328, + serialized_end=720, ) _GETTEST = _descriptor.Descriptor( name="GetTest", - full_name="tests.GetTest", + full_name="tests.v1beta1.GetTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.GetTest.doc_ref_path", + full_name="tests.v1beta1.GetTest.doc_ref_path", index=0, number=1, type=9, @@ -331,7 +331,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.GetTest.request", + full_name="tests.v1beta1.GetTest.request", index=1, number=2, type=11, @@ -356,21 +356,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=634, - serialized_end=728, + serialized_start=722, + serialized_end=816, ) _CREATETEST = _descriptor.Descriptor( name="CreateTest", - full_name="tests.CreateTest", + full_name="tests.v1beta1.CreateTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.CreateTest.doc_ref_path", + full_name="tests.v1beta1.CreateTest.doc_ref_path", index=0, number=1, type=9, @@ -388,7 +388,7 @@ ), _descriptor.FieldDescriptor( name="json_data", - full_name="tests.CreateTest.json_data", + full_name="tests.v1beta1.CreateTest.json_data", index=1, number=2, type=9, @@ -406,7 +406,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.CreateTest.request", + full_name="tests.v1beta1.CreateTest.request", index=2, number=3, type=11, @@ -424,7 +424,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.CreateTest.is_error", + full_name="tests.v1beta1.CreateTest.is_error", index=3, number=4, type=8, @@ -449,21 +449,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=731, - serialized_end=860, + serialized_start=819, + serialized_end=948, ) _SETTEST = _descriptor.Descriptor( name="SetTest", - full_name="tests.SetTest", + full_name="tests.v1beta1.SetTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.SetTest.doc_ref_path", + full_name="tests.v1beta1.SetTest.doc_ref_path", index=0, number=1, type=9, @@ -481,7 +481,7 @@ ), _descriptor.FieldDescriptor( name="option", - full_name="tests.SetTest.option", + full_name="tests.v1beta1.SetTest.option", index=1, number=2, type=11, @@ -499,7 +499,7 @@ ), _descriptor.FieldDescriptor( name="json_data", - full_name="tests.SetTest.json_data", + full_name="tests.v1beta1.SetTest.json_data", index=2, number=3, type=9, @@ -517,7 +517,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.SetTest.request", + full_name="tests.v1beta1.SetTest.request", index=3, number=4, type=11, @@ -535,7 +535,7 @@ ), 
_descriptor.FieldDescriptor( name="is_error", - full_name="tests.SetTest.is_error", + full_name="tests.v1beta1.SetTest.is_error", index=4, number=5, type=8, @@ -560,21 +560,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=863, - serialized_end=1023, + serialized_start=951, + serialized_end=1119, ) _UPDATETEST = _descriptor.Descriptor( name="UpdateTest", - full_name="tests.UpdateTest", + full_name="tests.v1beta1.UpdateTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.UpdateTest.doc_ref_path", + full_name="tests.v1beta1.UpdateTest.doc_ref_path", index=0, number=1, type=9, @@ -592,7 +592,7 @@ ), _descriptor.FieldDescriptor( name="precondition", - full_name="tests.UpdateTest.precondition", + full_name="tests.v1beta1.UpdateTest.precondition", index=1, number=2, type=11, @@ -610,7 +610,7 @@ ), _descriptor.FieldDescriptor( name="json_data", - full_name="tests.UpdateTest.json_data", + full_name="tests.v1beta1.UpdateTest.json_data", index=2, number=3, type=9, @@ -628,7 +628,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.UpdateTest.request", + full_name="tests.v1beta1.UpdateTest.request", index=3, number=4, type=11, @@ -646,7 +646,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.UpdateTest.is_error", + full_name="tests.v1beta1.UpdateTest.is_error", index=4, number=5, type=8, @@ -671,21 +671,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1026, - serialized_end=1217, + serialized_start=1122, + serialized_end=1313, ) _UPDATEPATHSTEST = _descriptor.Descriptor( name="UpdatePathsTest", - full_name="tests.UpdatePathsTest", + full_name="tests.v1beta1.UpdatePathsTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.UpdatePathsTest.doc_ref_path", + full_name="tests.v1beta1.UpdatePathsTest.doc_ref_path", index=0, number=1, type=9, @@ -703,7 +703,7 @@ ), _descriptor.FieldDescriptor( name="precondition", - full_name="tests.UpdatePathsTest.precondition", + full_name="tests.v1beta1.UpdatePathsTest.precondition", index=1, number=2, type=11, @@ -721,7 +721,7 @@ ), _descriptor.FieldDescriptor( name="field_paths", - full_name="tests.UpdatePathsTest.field_paths", + full_name="tests.v1beta1.UpdatePathsTest.field_paths", index=2, number=3, type=11, @@ -739,7 +739,7 @@ ), _descriptor.FieldDescriptor( name="json_values", - full_name="tests.UpdatePathsTest.json_values", + full_name="tests.v1beta1.UpdatePathsTest.json_values", index=3, number=4, type=9, @@ -757,7 +757,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.UpdatePathsTest.request", + full_name="tests.v1beta1.UpdatePathsTest.request", index=4, number=5, type=11, @@ -775,7 +775,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.UpdatePathsTest.is_error", + full_name="tests.v1beta1.UpdatePathsTest.is_error", index=5, number=6, type=8, @@ -800,21 +800,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1220, - serialized_end=1457, + serialized_start=1316, + serialized_end=1561, ) _DELETETEST = _descriptor.Descriptor( name="DeleteTest", - full_name="tests.DeleteTest", + full_name="tests.v1beta1.DeleteTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_ref_path", - full_name="tests.DeleteTest.doc_ref_path", + full_name="tests.v1beta1.DeleteTest.doc_ref_path", index=0, number=1, 
type=9, @@ -832,7 +832,7 @@ ), _descriptor.FieldDescriptor( name="precondition", - full_name="tests.DeleteTest.precondition", + full_name="tests.v1beta1.DeleteTest.precondition", index=1, number=2, type=11, @@ -850,7 +850,7 @@ ), _descriptor.FieldDescriptor( name="request", - full_name="tests.DeleteTest.request", + full_name="tests.v1beta1.DeleteTest.request", index=2, number=3, type=11, @@ -868,7 +868,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.DeleteTest.is_error", + full_name="tests.v1beta1.DeleteTest.is_error", index=3, number=4, type=8, @@ -893,21 +893,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1460, - serialized_end=1632, + serialized_start=1564, + serialized_end=1736, ) _SETOPTION = _descriptor.Descriptor( name="SetOption", - full_name="tests.SetOption", + full_name="tests.v1beta1.SetOption", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="all", - full_name="tests.SetOption.all", + full_name="tests.v1beta1.SetOption.all", index=0, number=1, type=8, @@ -925,7 +925,7 @@ ), _descriptor.FieldDescriptor( name="fields", - full_name="tests.SetOption.fields", + full_name="tests.v1beta1.SetOption.fields", index=1, number=2, type=11, @@ -950,21 +950,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1634, - serialized_end=1692, + serialized_start=1738, + serialized_end=1804, ) _QUERYTEST = _descriptor.Descriptor( name="QueryTest", - full_name="tests.QueryTest", + full_name="tests.v1beta1.QueryTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="coll_path", - full_name="tests.QueryTest.coll_path", + full_name="tests.v1beta1.QueryTest.coll_path", index=0, number=1, type=9, @@ -982,7 +982,7 @@ ), _descriptor.FieldDescriptor( name="clauses", - full_name="tests.QueryTest.clauses", + full_name="tests.v1beta1.QueryTest.clauses", index=1, number=2, type=11, @@ -1000,7 +1000,7 @@ ), _descriptor.FieldDescriptor( name="query", - full_name="tests.QueryTest.query", + full_name="tests.v1beta1.QueryTest.query", index=2, number=3, type=11, @@ -1018,7 +1018,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.QueryTest.is_error", + full_name="tests.v1beta1.QueryTest.is_error", index=3, number=4, type=8, @@ -1043,21 +1043,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1695, - serialized_end=1833, + serialized_start=1807, + serialized_end=1953, ) _CLAUSE = _descriptor.Descriptor( name="Clause", - full_name="tests.Clause", + full_name="tests.v1beta1.Clause", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="select", - full_name="tests.Clause.select", + full_name="tests.v1beta1.Clause.select", index=0, number=1, type=11, @@ -1075,7 +1075,7 @@ ), _descriptor.FieldDescriptor( name="where", - full_name="tests.Clause.where", + full_name="tests.v1beta1.Clause.where", index=1, number=2, type=11, @@ -1093,7 +1093,7 @@ ), _descriptor.FieldDescriptor( name="order_by", - full_name="tests.Clause.order_by", + full_name="tests.v1beta1.Clause.order_by", index=2, number=3, type=11, @@ -1111,7 +1111,7 @@ ), _descriptor.FieldDescriptor( name="offset", - full_name="tests.Clause.offset", + full_name="tests.v1beta1.Clause.offset", index=3, number=4, type=5, @@ -1129,7 +1129,7 @@ ), _descriptor.FieldDescriptor( name="limit", - full_name="tests.Clause.limit", + full_name="tests.v1beta1.Clause.limit", index=4, number=5, type=5, @@ -1147,7 +1147,7 @@ ), 
_descriptor.FieldDescriptor( name="start_at", - full_name="tests.Clause.start_at", + full_name="tests.v1beta1.Clause.start_at", index=5, number=6, type=11, @@ -1165,7 +1165,7 @@ ), _descriptor.FieldDescriptor( name="start_after", - full_name="tests.Clause.start_after", + full_name="tests.v1beta1.Clause.start_after", index=6, number=7, type=11, @@ -1183,7 +1183,7 @@ ), _descriptor.FieldDescriptor( name="end_at", - full_name="tests.Clause.end_at", + full_name="tests.v1beta1.Clause.end_at", index=7, number=8, type=11, @@ -1201,7 +1201,7 @@ ), _descriptor.FieldDescriptor( name="end_before", - full_name="tests.Clause.end_before", + full_name="tests.v1beta1.Clause.end_before", index=8, number=9, type=11, @@ -1228,27 +1228,27 @@ oneofs=[ _descriptor.OneofDescriptor( name="clause", - full_name="tests.Clause.clause", + full_name="tests.v1beta1.Clause.clause", index=0, containing_type=None, fields=[], ) ], - serialized_start=1836, - serialized_end=2132, + serialized_start=1956, + serialized_end=2308, ) _SELECT = _descriptor.Descriptor( name="Select", - full_name="tests.Select", + full_name="tests.v1beta1.Select", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="fields", - full_name="tests.Select.fields", + full_name="tests.v1beta1.Select.fields", index=0, number=1, type=11, @@ -1273,21 +1273,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2134, - serialized_end=2176, + serialized_start=2310, + serialized_end=2360, ) _WHERE = _descriptor.Descriptor( name="Where", - full_name="tests.Where", + full_name="tests.v1beta1.Where", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="path", - full_name="tests.Where.path", + full_name="tests.v1beta1.Where.path", index=0, number=1, type=11, @@ -1305,7 +1305,7 @@ ), _descriptor.FieldDescriptor( name="op", - full_name="tests.Where.op", + full_name="tests.v1beta1.Where.op", index=1, number=2, type=9, @@ -1323,7 +1323,7 @@ ), _descriptor.FieldDescriptor( name="json_value", - full_name="tests.Where.json_value", + full_name="tests.v1beta1.Where.json_value", index=2, number=3, type=9, @@ -1348,21 +1348,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2178, - serialized_end=2249, + serialized_start=2362, + serialized_end=2441, ) _ORDERBY = _descriptor.Descriptor( name="OrderBy", - full_name="tests.OrderBy", + full_name="tests.v1beta1.OrderBy", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="path", - full_name="tests.OrderBy.path", + full_name="tests.v1beta1.OrderBy.path", index=0, number=1, type=11, @@ -1380,7 +1380,7 @@ ), _descriptor.FieldDescriptor( name="direction", - full_name="tests.OrderBy.direction", + full_name="tests.v1beta1.OrderBy.direction", index=1, number=2, type=9, @@ -1405,21 +1405,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2251, - serialized_end=2311, + serialized_start=2443, + serialized_end=2511, ) _CURSOR = _descriptor.Descriptor( name="Cursor", - full_name="tests.Cursor", + full_name="tests.v1beta1.Cursor", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="doc_snapshot", - full_name="tests.Cursor.doc_snapshot", + full_name="tests.v1beta1.Cursor.doc_snapshot", index=0, number=1, type=11, @@ -1437,7 +1437,7 @@ ), _descriptor.FieldDescriptor( name="json_values", - full_name="tests.Cursor.json_values", + full_name="tests.v1beta1.Cursor.json_values", index=1, number=2, type=9, @@ 
-1462,21 +1462,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2313, - serialized_end=2384, + serialized_start=2513, + serialized_end=2592, ) _DOCSNAPSHOT = _descriptor.Descriptor( name="DocSnapshot", - full_name="tests.DocSnapshot", + full_name="tests.v1beta1.DocSnapshot", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="path", - full_name="tests.DocSnapshot.path", + full_name="tests.v1beta1.DocSnapshot.path", index=0, number=1, type=9, @@ -1494,7 +1494,7 @@ ), _descriptor.FieldDescriptor( name="json_data", - full_name="tests.DocSnapshot.json_data", + full_name="tests.v1beta1.DocSnapshot.json_data", index=1, number=2, type=9, @@ -1519,21 +1519,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2386, - serialized_end=2432, + serialized_start=2594, + serialized_end=2640, ) _FIELDPATH = _descriptor.Descriptor( name="FieldPath", - full_name="tests.FieldPath", + full_name="tests.v1beta1.FieldPath", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="field", - full_name="tests.FieldPath.field", + full_name="tests.v1beta1.FieldPath.field", index=0, number=1, type=9, @@ -1558,21 +1558,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2434, - serialized_end=2460, + serialized_start=2642, + serialized_end=2668, ) _LISTENTEST = _descriptor.Descriptor( name="ListenTest", - full_name="tests.ListenTest", + full_name="tests.v1beta1.ListenTest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="responses", - full_name="tests.ListenTest.responses", + full_name="tests.v1beta1.ListenTest.responses", index=0, number=1, type=11, @@ -1590,7 +1590,7 @@ ), _descriptor.FieldDescriptor( name="snapshots", - full_name="tests.ListenTest.snapshots", + full_name="tests.v1beta1.ListenTest.snapshots", index=1, number=2, type=11, @@ -1608,7 +1608,7 @@ ), _descriptor.FieldDescriptor( name="is_error", - full_name="tests.ListenTest.is_error", + full_name="tests.v1beta1.ListenTest.is_error", index=2, number=3, type=8, @@ -1633,21 +1633,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2462, - serialized_end=2589, + serialized_start=2671, + serialized_end=2806, ) _SNAPSHOT = _descriptor.Descriptor( name="Snapshot", - full_name="tests.Snapshot", + full_name="tests.v1beta1.Snapshot", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="docs", - full_name="tests.Snapshot.docs", + full_name="tests.v1beta1.Snapshot.docs", index=0, number=1, type=11, @@ -1665,7 +1665,7 @@ ), _descriptor.FieldDescriptor( name="changes", - full_name="tests.Snapshot.changes", + full_name="tests.v1beta1.Snapshot.changes", index=1, number=2, type=11, @@ -1683,7 +1683,7 @@ ), _descriptor.FieldDescriptor( name="read_time", - full_name="tests.Snapshot.read_time", + full_name="tests.v1beta1.Snapshot.read_time", index=2, number=3, type=11, @@ -1708,21 +1708,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2592, - serialized_end=2734, + serialized_start=2809, + serialized_end=2959, ) _DOCCHANGE = _descriptor.Descriptor( name="DocChange", - full_name="tests.DocChange", + full_name="tests.v1beta1.DocChange", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="kind", - full_name="tests.DocChange.kind", + full_name="tests.v1beta1.DocChange.kind", index=0, number=1, type=14, @@ -1740,7 +1740,7 @@ ), 
_descriptor.FieldDescriptor( name="doc", - full_name="tests.DocChange.doc", + full_name="tests.v1beta1.DocChange.doc", index=1, number=2, type=11, @@ -1758,7 +1758,7 @@ ), _descriptor.FieldDescriptor( name="old_index", - full_name="tests.DocChange.old_index", + full_name="tests.v1beta1.DocChange.old_index", index=2, number=3, type=5, @@ -1776,7 +1776,7 @@ ), _descriptor.FieldDescriptor( name="new_index", - full_name="tests.DocChange.new_index", + full_name="tests.v1beta1.DocChange.new_index", index=3, number=4, type=5, @@ -1801,8 +1801,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2737, - serialized_end=2940, + serialized_start=2962, + serialized_end=3173, ) _TESTSUITE.fields_by_name["tests"].message_type = _TEST @@ -1964,8 +1964,8 @@ (_message.Message,), dict( DESCRIPTOR=_TESTSUITE, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.TestSuite) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.TestSuite) ), ) _sym_db.RegisterMessage(TestSuite) @@ -1975,8 +1975,8 @@ (_message.Message,), dict( DESCRIPTOR=_TEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Test) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Test) ), ) _sym_db.RegisterMessage(Test) @@ -1986,8 +1986,8 @@ (_message.Message,), dict( DESCRIPTOR=_GETTEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.GetTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.GetTest) ), ) _sym_db.RegisterMessage(GetTest) @@ -1997,8 +1997,8 @@ (_message.Message,), dict( DESCRIPTOR=_CREATETEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.CreateTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.CreateTest) ), ) _sym_db.RegisterMessage(CreateTest) @@ -2008,8 +2008,8 @@ (_message.Message,), dict( DESCRIPTOR=_SETTEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.SetTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.SetTest) ), ) _sym_db.RegisterMessage(SetTest) @@ -2019,8 +2019,8 @@ (_message.Message,), dict( DESCRIPTOR=_UPDATETEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.UpdateTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdateTest) ), ) _sym_db.RegisterMessage(UpdateTest) @@ -2030,8 +2030,8 @@ (_message.Message,), dict( DESCRIPTOR=_UPDATEPATHSTEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.UpdatePathsTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.UpdatePathsTest) ), ) _sym_db.RegisterMessage(UpdatePathsTest) @@ -2041,8 +2041,8 @@ (_message.Message,), dict( DESCRIPTOR=_DELETETEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.DeleteTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.DeleteTest) ), ) _sym_db.RegisterMessage(DeleteTest) @@ -2052,8 +2052,8 @@ (_message.Message,), dict( DESCRIPTOR=_SETOPTION, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.SetOption) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.SetOption) ), ) _sym_db.RegisterMessage(SetOption) @@ -2063,8 +2063,8 @@ (_message.Message,), dict( DESCRIPTOR=_QUERYTEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.QueryTest) + 
__module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.QueryTest) ), ) _sym_db.RegisterMessage(QueryTest) @@ -2074,8 +2074,8 @@ (_message.Message,), dict( DESCRIPTOR=_CLAUSE, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Clause) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Clause) ), ) _sym_db.RegisterMessage(Clause) @@ -2085,8 +2085,8 @@ (_message.Message,), dict( DESCRIPTOR=_SELECT, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Select) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Select) ), ) _sym_db.RegisterMessage(Select) @@ -2096,8 +2096,8 @@ (_message.Message,), dict( DESCRIPTOR=_WHERE, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Where) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Where) ), ) _sym_db.RegisterMessage(Where) @@ -2107,8 +2107,8 @@ (_message.Message,), dict( DESCRIPTOR=_ORDERBY, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.OrderBy) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.OrderBy) ), ) _sym_db.RegisterMessage(OrderBy) @@ -2118,8 +2118,8 @@ (_message.Message,), dict( DESCRIPTOR=_CURSOR, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Cursor) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Cursor) ), ) _sym_db.RegisterMessage(Cursor) @@ -2129,8 +2129,8 @@ (_message.Message,), dict( DESCRIPTOR=_DOCSNAPSHOT, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.DocSnapshot) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.DocSnapshot) ), ) _sym_db.RegisterMessage(DocSnapshot) @@ -2140,8 +2140,8 @@ (_message.Message,), dict( DESCRIPTOR=_FIELDPATH, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.FieldPath) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.FieldPath) ), ) _sym_db.RegisterMessage(FieldPath) @@ -2151,8 +2151,8 @@ (_message.Message,), dict( DESCRIPTOR=_LISTENTEST, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.ListenTest) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.ListenTest) ), ) _sym_db.RegisterMessage(ListenTest) @@ -2162,8 +2162,8 @@ (_message.Message,), dict( DESCRIPTOR=_SNAPSHOT, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.Snapshot) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.Snapshot) ), ) _sym_db.RegisterMessage(Snapshot) @@ -2173,8 +2173,8 @@ (_message.Message,), dict( DESCRIPTOR=_DOCCHANGE, - __module__="test_pb2" - # @@protoc_insertion_point(class_scope:tests.DocChange) + __module__="test_v1beta1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1beta1.DocChange) ), ) _sym_db.RegisterMessage(DocChange) diff --git a/firestore/tests/unit/v1beta1/test_cross_language.py b/firestore/tests/unit/v1beta1/test_cross_language.py index 2264b4ce9450..f9b8d0d42c70 100644 --- a/firestore/tests/unit/v1beta1/test_cross_language.py +++ b/firestore/tests/unit/v1beta1/test_cross_language.py @@ -23,14 +23,14 @@ from google.protobuf import text_format from google.cloud.firestore_v1beta1.proto import document_pb2 from google.cloud.firestore_v1beta1.proto import firestore_pb2 -from google.cloud.firestore_v1beta1.proto import test_pb2 +from 
google.cloud.firestore_v1beta1.proto import test_v1beta1_pb2 from google.cloud.firestore_v1beta1.proto import write_pb2 def _load_testproto(filename): with open(filename, "r") as tp_file: tp_text = tp_file.read() - test_proto = test_pb2.Test() + test_proto = test_v1beta1_pb2.Test() text_format.Merge(tp_text, test_proto) shortname = os.path.split(filename)[-1] test_proto.description = test_proto.description + " (%s)" % shortname From bb58304eb57bd25956645c577f04036f2788205a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 6 Mar 2019 15:40:05 -0500 Subject: [PATCH 4/7] Fork manual 'v1beta1' code -> 'v1' and bash to fit. --- firestore/Makefile | 36 - firestore/Makefile_v1 | 37 + .../google/cloud/firestore_v1/__init__.py | 65 + .../google/cloud/firestore_v1/_helpers.py | 1002 ++++++++ firestore/google/cloud/firestore_v1/batch.py | 161 ++ firestore/google/cloud/firestore_v1/client.py | 513 ++++ .../google/cloud/firestore_v1/collection.py | 477 ++++ .../google/cloud/firestore_v1/document.py | 780 ++++++ .../google/cloud/firestore_v1/field_path.py | 386 +++ firestore/google/cloud/firestore_v1/order.py | 207 ++ .../cloud/firestore_v1/proto/test_v1_pb2.py | 2190 +++++++++++++++++ firestore/google/cloud/firestore_v1/query.py | 970 ++++++++ .../google/cloud/firestore_v1/transaction.py | 409 +++ .../google/cloud/firestore_v1/transforms.py | 90 + firestore/google/cloud/firestore_v1/types.py | 63 + firestore/google/cloud/firestore_v1/watch.py | 721 ++++++ firestore/tests/unit/v1/__init__.py | 13 + firestore/tests/unit/v1/test__helpers.py | 2089 ++++++++++++++++ firestore/tests/unit/v1/test_batch.py | 271 ++ firestore/tests/unit/v1/test_client.py | 629 +++++ firestore/tests/unit/v1/test_collection.py | 589 +++++ .../tests/unit/v1/test_cross_language.py | 495 ++++ firestore/tests/unit/v1/test_document.py | 825 +++++++ firestore/tests/unit/v1/test_field_path.py | 495 ++++ firestore/tests/unit/v1/test_order.py | 247 ++ firestore/tests/unit/v1/test_query.py | 1587 ++++++++++++ firestore/tests/unit/v1/test_transaction.py | 985 ++++++++ firestore/tests/unit/v1/test_transforms.py | 65 + firestore/tests/unit/v1/test_watch.py | 830 +++++++ .../testdata/create-all-transforms.textproto | 64 + .../create-arrayremove-multi.textproto | 61 + .../create-arrayremove-nested.textproto | 48 + ...reate-arrayremove-noarray-nested.textproto | 12 + .../create-arrayremove-noarray.textproto | 12 + .../create-arrayremove-with-st.textproto | 12 + .../v1/testdata/create-arrayremove.textproto | 47 + .../create-arrayunion-multi.textproto | 61 + .../create-arrayunion-nested.textproto | 48 + ...create-arrayunion-noarray-nested.textproto | 12 + .../create-arrayunion-noarray.textproto | 12 + .../create-arrayunion-with-st.textproto | 12 + .../v1/testdata/create-arrayunion.textproto | 47 + .../unit/v1/testdata/create-basic.textproto | 27 + .../unit/v1/testdata/create-complex.textproto | 61 + .../create-del-noarray-nested.textproto | 13 + .../v1/testdata/create-del-noarray.textproto | 13 + .../unit/v1/testdata/create-empty.textproto | 20 + .../unit/v1/testdata/create-nodel.textproto | 11 + .../unit/v1/testdata/create-nosplit.textproto | 40 + .../testdata/create-special-chars.textproto | 41 + .../v1/testdata/create-st-alone.textproto | 26 + .../v1/testdata/create-st-multi.textproto | 41 + .../v1/testdata/create-st-nested.textproto | 38 + .../create-st-noarray-nested.textproto | 12 + .../v1/testdata/create-st-noarray.textproto | 12 + .../create-st-with-empty-map.textproto | 45 + .../unit/v1/testdata/create-st.textproto | 39 + 
.../testdata/delete-exists-precond.textproto | 21 + .../v1/testdata/delete-no-precond.textproto | 15 + .../v1/testdata/delete-time-precond.textproto | 25 + .../unit/v1/testdata/get-basic.textproto | 12 + .../testdata/listen-add-mod-del-add.textproto | 246 ++ .../unit/v1/testdata/listen-add-one.textproto | 79 + .../v1/testdata/listen-add-three.textproto | 190 ++ .../v1/testdata/listen-doc-remove.textproto | 115 + .../unit/v1/testdata/listen-empty.textproto | 25 + .../v1/testdata/listen-filter-nop.textproto | 247 ++ .../v1/testdata/listen-multi-docs.textproto | 524 ++++ .../v1/testdata/listen-nocurrent.textproto | 141 ++ .../unit/v1/testdata/listen-nomod.textproto | 143 ++ .../listen-removed-target-ids.textproto | 131 + .../unit/v1/testdata/listen-reset.textproto | 382 +++ .../testdata/listen-target-add-nop.textproto | 88 + .../listen-target-add-wrong-id.textproto | 50 + .../testdata/listen-target-remove.textproto | 46 + .../query-arrayremove-cursor.textproto | 23 + .../query-arrayremove-where.textproto | 19 + .../query-arrayunion-cursor.textproto | 23 + .../testdata/query-arrayunion-where.textproto | 19 + .../unit/v1/testdata/query-bad-NaN.textproto | 19 + .../unit/v1/testdata/query-bad-null.textproto | 19 + .../query-cursor-docsnap-order.textproto | 68 + ...uery-cursor-docsnap-orderby-name.textproto | 76 + .../query-cursor-docsnap-where-eq.textproto | 53 + ...cursor-docsnap-where-neq-orderby.textproto | 72 + .../query-cursor-docsnap-where-neq.textproto | 64 + .../testdata/query-cursor-docsnap.textproto | 34 + ...query-cursor-endbefore-empty-map.textproto | 41 + .../query-cursor-endbefore-empty.textproto | 23 + .../testdata/query-cursor-no-order.textproto | 16 + .../query-cursor-startat-empty-map.textproto | 41 + .../query-cursor-startat-empty.textproto | 23 + .../testdata/query-cursor-vals-1a.textproto | 50 + .../testdata/query-cursor-vals-1b.textproto | 48 + .../v1/testdata/query-cursor-vals-2.textproto | 71 + .../query-cursor-vals-docid.textproto | 50 + .../query-cursor-vals-last-wins.textproto | 60 + .../v1/testdata/query-del-cursor.textproto | 23 + .../v1/testdata/query-del-where.textproto | 19 + .../testdata/query-invalid-operator.textproto | 19 + .../query-invalid-path-order.textproto | 19 + .../query-invalid-path-select.textproto | 18 + .../query-invalid-path-where.textproto | 20 + .../query-offset-limit-last-wins.textproto | 30 + .../v1/testdata/query-offset-limit.textproto | 24 + .../unit/v1/testdata/query-order.textproto | 42 + .../v1/testdata/query-select-empty.textproto | 23 + .../testdata/query-select-last-wins.textproto | 36 + .../unit/v1/testdata/query-select.textproto | 32 + .../v1/testdata/query-st-cursor.textproto | 23 + .../unit/v1/testdata/query-st-where.textproto | 19 + .../unit/v1/testdata/query-where-2.textproto | 59 + .../v1/testdata/query-where-NaN.textproto | 31 + .../v1/testdata/query-where-null.textproto | 31 + .../unit/v1/testdata/query-where.textproto | 34 + .../testdata/query-wrong-collection.textproto | 19 + .../v1/testdata/set-all-transforms.textproto | 61 + .../testdata/set-arrayremove-multi.textproto | 58 + .../testdata/set-arrayremove-nested.textproto | 45 + .../set-arrayremove-noarray-nested.textproto | 12 + .../set-arrayremove-noarray.textproto | 12 + .../set-arrayremove-with-st.textproto | 12 + .../v1/testdata/set-arrayremove.textproto | 44 + .../testdata/set-arrayunion-multi.textproto | 58 + .../testdata/set-arrayunion-nested.textproto | 45 + .../set-arrayunion-noarray-nested.textproto | 12 + .../testdata/set-arrayunion-noarray.textproto | 12 + 
.../testdata/set-arrayunion-with-st.textproto | 12 + .../unit/v1/testdata/set-arrayunion.textproto | 44 + .../unit/v1/testdata/set-basic.textproto | 24 + .../unit/v1/testdata/set-complex.textproto | 58 + .../v1/testdata/set-del-merge-alone.textproto | 28 + .../unit/v1/testdata/set-del-merge.textproto | 37 + .../v1/testdata/set-del-mergeall.textproto | 31 + .../testdata/set-del-noarray-nested.textproto | 13 + .../v1/testdata/set-del-noarray.textproto | 13 + .../v1/testdata/set-del-nomerge.textproto | 17 + .../v1/testdata/set-del-nonleaf.textproto | 19 + .../v1/testdata/set-del-wo-merge.textproto | 12 + .../unit/v1/testdata/set-empty.textproto | 17 + .../unit/v1/testdata/set-merge-fp.textproto | 40 + .../v1/testdata/set-merge-nested.textproto | 41 + .../v1/testdata/set-merge-nonleaf.textproto | 46 + .../v1/testdata/set-merge-prefix.textproto | 21 + .../v1/testdata/set-merge-present.textproto | 20 + .../unit/v1/testdata/set-merge.textproto | 32 + .../v1/testdata/set-mergeall-empty.textproto | 23 + .../v1/testdata/set-mergeall-nested.textproto | 45 + .../unit/v1/testdata/set-mergeall.textproto | 37 + .../unit/v1/testdata/set-nodel.textproto | 11 + .../unit/v1/testdata/set-nosplit.textproto | 37 + .../v1/testdata/set-special-chars.textproto | 38 + .../testdata/set-st-alone-mergeall.textproto | 26 + .../unit/v1/testdata/set-st-alone.textproto | 28 + .../v1/testdata/set-st-merge-both.textproto | 45 + .../set-st-merge-nonleaf-alone.textproto | 37 + .../testdata/set-st-merge-nonleaf.textproto | 49 + .../testdata/set-st-merge-nowrite.textproto | 28 + .../v1/testdata/set-st-mergeall.textproto | 40 + .../unit/v1/testdata/set-st-multi.textproto | 38 + .../unit/v1/testdata/set-st-nested.textproto | 35 + .../testdata/set-st-noarray-nested.textproto | 12 + .../unit/v1/testdata/set-st-noarray.textproto | 12 + .../unit/v1/testdata/set-st-nomerge.textproto | 33 + .../testdata/set-st-with-empty-map.textproto | 42 + .../tests/unit/v1/testdata/set-st.textproto | 36 + .../unit/v1/testdata/test-suite.binproto | Bin 0 -> 55916 bytes .../testdata/update-all-transforms.textproto | 67 + .../update-arrayremove-alone.textproto | 36 + .../update-arrayremove-multi.textproto | 69 + .../update-arrayremove-nested.textproto | 52 + ...pdate-arrayremove-noarray-nested.textproto | 12 + .../update-arrayremove-noarray.textproto | 12 + .../update-arrayremove-with-st.textproto | 12 + .../v1/testdata/update-arrayremove.textproto | 50 + .../update-arrayunion-alone.textproto | 36 + .../update-arrayunion-multi.textproto | 69 + .../update-arrayunion-nested.textproto | 52 + ...update-arrayunion-noarray-nested.textproto | 12 + .../update-arrayunion-noarray.textproto | 12 + .../update-arrayunion-with-st.textproto | 12 + .../v1/testdata/update-arrayunion.textproto | 50 + .../unit/v1/testdata/update-badchar.textproto | 12 + .../unit/v1/testdata/update-basic.textproto | 30 + .../unit/v1/testdata/update-complex.textproto | 65 + .../v1/testdata/update-del-alone.textproto | 25 + .../unit/v1/testdata/update-del-dot.textproto | 46 + .../v1/testdata/update-del-nested.textproto | 11 + .../update-del-noarray-nested.textproto | 13 + .../v1/testdata/update-del-noarray.textproto | 13 + .../unit/v1/testdata/update-del.textproto | 32 + .../testdata/update-exists-precond.textproto | 14 + .../update-fp-empty-component.textproto | 11 + .../v1/testdata/update-no-paths.textproto | 11 + .../update-paths-all-transforms.textproto | 82 + .../update-paths-arrayremove-alone.textproto | 39 + .../update-paths-arrayremove-multi.textproto | 76 + 
.../update-paths-arrayremove-nested.textproto | 59 + ...paths-arrayremove-noarray-nested.textproto | 15 + ...update-paths-arrayremove-noarray.textproto | 15 + ...update-paths-arrayremove-with-st.textproto | 15 + .../update-paths-arrayremove.textproto | 57 + .../update-paths-arrayunion-alone.textproto | 39 + .../update-paths-arrayunion-multi.textproto | 76 + .../update-paths-arrayunion-nested.textproto | 59 + ...-paths-arrayunion-noarray-nested.textproto | 15 + .../update-paths-arrayunion-noarray.textproto | 15 + .../update-paths-arrayunion-with-st.textproto | 15 + .../update-paths-arrayunion.textproto | 57 + .../v1/testdata/update-paths-basic.textproto | 33 + .../testdata/update-paths-complex.textproto | 72 + .../testdata/update-paths-del-alone.textproto | 28 + .../update-paths-del-nested.textproto | 14 + .../update-paths-del-noarray-nested.textproto | 16 + .../update-paths-del-noarray.textproto | 16 + .../v1/testdata/update-paths-del.textproto | 39 + .../update-paths-exists-precond.textproto | 17 + .../v1/testdata/update-paths-fp-del.textproto | 47 + .../update-paths-fp-dup-transforms.textproto | 23 + .../v1/testdata/update-paths-fp-dup.textproto | 22 + .../update-paths-fp-empty-component.textproto | 15 + .../testdata/update-paths-fp-empty.textproto | 13 + .../testdata/update-paths-fp-multi.textproto | 42 + .../update-paths-fp-nosplit.textproto | 48 + .../testdata/update-paths-no-paths.textproto | 10 + .../testdata/update-paths-prefix-1.textproto | 19 + .../testdata/update-paths-prefix-2.textproto | 19 + .../testdata/update-paths-prefix-3.textproto | 20 + .../update-paths-special-chars.textproto | 53 + .../testdata/update-paths-st-alone.textproto | 29 + .../testdata/update-paths-st-multi.textproto | 56 + .../testdata/update-paths-st-nested.textproto | 49 + .../update-paths-st-noarray-nested.textproto | 15 + .../update-paths-st-noarray.textproto | 15 + .../update-paths-st-with-empty-map.textproto | 51 + .../v1/testdata/update-paths-st.textproto | 49 + .../v1/testdata/update-paths-uptime.textproto | 40 + .../v1/testdata/update-prefix-1.textproto | 11 + .../v1/testdata/update-prefix-2.textproto | 11 + .../v1/testdata/update-prefix-3.textproto | 12 + .../unit/v1/testdata/update-quoting.textproto | 45 + .../testdata/update-split-top-level.textproto | 45 + .../unit/v1/testdata/update-split.textproto | 44 + .../v1/testdata/update-st-alone.textproto | 26 + .../unit/v1/testdata/update-st-dot.textproto | 27 + .../v1/testdata/update-st-multi.textproto | 49 + .../v1/testdata/update-st-nested.textproto | 42 + .../update-st-noarray-nested.textproto | 12 + .../v1/testdata/update-st-noarray.textproto | 12 + .../update-st-with-empty-map.textproto | 48 + .../unit/v1/testdata/update-st.textproto | 42 + .../unit/v1/testdata/update-uptime.textproto | 37 + 252 files changed, 26342 insertions(+), 36 deletions(-) delete mode 100644 firestore/Makefile create mode 100644 firestore/Makefile_v1 create mode 100644 firestore/google/cloud/firestore_v1/__init__.py create mode 100644 firestore/google/cloud/firestore_v1/_helpers.py create mode 100644 firestore/google/cloud/firestore_v1/batch.py create mode 100644 firestore/google/cloud/firestore_v1/client.py create mode 100644 firestore/google/cloud/firestore_v1/collection.py create mode 100644 firestore/google/cloud/firestore_v1/document.py create mode 100644 firestore/google/cloud/firestore_v1/field_path.py create mode 100644 firestore/google/cloud/firestore_v1/order.py create mode 100644 firestore/google/cloud/firestore_v1/proto/test_v1_pb2.py create mode 100644 
firestore/google/cloud/firestore_v1/query.py create mode 100644 firestore/google/cloud/firestore_v1/transaction.py create mode 100644 firestore/google/cloud/firestore_v1/transforms.py create mode 100644 firestore/google/cloud/firestore_v1/types.py create mode 100644 firestore/google/cloud/firestore_v1/watch.py create mode 100644 firestore/tests/unit/v1/__init__.py create mode 100644 firestore/tests/unit/v1/test__helpers.py create mode 100644 firestore/tests/unit/v1/test_batch.py create mode 100644 firestore/tests/unit/v1/test_client.py create mode 100644 firestore/tests/unit/v1/test_collection.py create mode 100644 firestore/tests/unit/v1/test_cross_language.py create mode 100644 firestore/tests/unit/v1/test_document.py create mode 100644 firestore/tests/unit/v1/test_field_path.py create mode 100644 firestore/tests/unit/v1/test_order.py create mode 100644 firestore/tests/unit/v1/test_query.py create mode 100644 firestore/tests/unit/v1/test_transaction.py create mode 100644 firestore/tests/unit/v1/test_transforms.py create mode 100644 firestore/tests/unit/v1/test_watch.py create mode 100644 firestore/tests/unit/v1/testdata/create-all-transforms.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayremove.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-arrayunion.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-basic.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-complex.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-del-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-nodel.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-nosplit.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-special-chars.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st-with-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/create-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/delete-exists-precond.textproto create mode 100644 
firestore/tests/unit/v1/testdata/delete-no-precond.textproto create mode 100644 firestore/tests/unit/v1/testdata/delete-time-precond.textproto create mode 100644 firestore/tests/unit/v1/testdata/get-basic.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-add-mod-del-add.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-add-one.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-add-three.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-doc-remove.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-filter-nop.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-multi-docs.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-nocurrent.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-nomod.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-removed-target-ids.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-reset.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-target-add-nop.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-target-add-wrong-id.textproto create mode 100644 firestore/tests/unit/v1/testdata/listen-target-remove.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-arrayremove-cursor.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-arrayremove-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-arrayunion-cursor.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-arrayunion-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-bad-NaN.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-bad-null.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap-order.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap-orderby-name.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-eq.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq-orderby.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-docsnap.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-no-order.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-startat-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-startat-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-vals-1a.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-vals-1b.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-vals-2.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-vals-docid.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-cursor-vals-last-wins.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-del-cursor.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-del-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-invalid-operator.textproto create mode 100644 
firestore/tests/unit/v1/testdata/query-invalid-path-order.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-invalid-path-select.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-invalid-path-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-offset-limit-last-wins.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-offset-limit.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-order.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-select-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-select-last-wins.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-select.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-st-cursor.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-st-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-where-2.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-where-NaN.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-where-null.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-where.textproto create mode 100644 firestore/tests/unit/v1/testdata/query-wrong-collection.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-all-transforms.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayremove.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-arrayunion.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-basic.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-complex.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-merge-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-merge.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-mergeall.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-nomerge.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-nonleaf.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-del-wo-merge.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-merge-fp.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-merge-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-merge-nonleaf.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-merge-prefix.textproto create mode 
100644 firestore/tests/unit/v1/testdata/set-merge-present.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-merge.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-mergeall-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-mergeall-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-mergeall.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-nodel.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-nosplit.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-special-chars.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-alone-mergeall.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-merge-both.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-merge-nonleaf.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-merge-nowrite.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-mergeall.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-nomerge.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st-with-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/set-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/test-suite.binproto create mode 100644 firestore/tests/unit/v1/testdata/update-all-transforms.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayremove.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-arrayunion.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-badchar.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-basic.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-complex.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-del-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-del-dot.textproto create mode 100644 
firestore/tests/unit/v1/testdata/update-del-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-del-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-del.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-exists-precond.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-fp-empty-component.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-no-paths.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-all-transforms.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayremove.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion-with-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-arrayunion.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-basic.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-complex.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-del-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-del-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-del-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-del.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-exists-precond.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-del.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-dup-transforms.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-dup.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-empty-component.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-empty.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-fp-nosplit.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-no-paths.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-prefix-1.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-prefix-2.textproto create mode 100644 
firestore/tests/unit/v1/testdata/update-paths-prefix-3.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-special-chars.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st-with-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-paths-uptime.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-prefix-1.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-prefix-2.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-prefix-3.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-quoting.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-split-top-level.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-split.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-alone.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-dot.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-multi.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-noarray.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st-with-empty-map.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-st.textproto create mode 100644 firestore/tests/unit/v1/testdata/update-uptime.textproto diff --git a/firestore/Makefile b/firestore/Makefile deleted file mode 100644 index 98730491fa80..000000000000 --- a/firestore/Makefile +++ /dev/null @@ -1,36 +0,0 @@ -# This makefile builds the protos needed for cross-language Firestore tests. - -# Assume protoc is on the path. The proto compiler must be one that -# supports proto3 syntax. -PROTOC = protoc - -# Dependent repos. -PROTOBUF_REPO = $(HOME)/git-repos/protobuf -GOOGLEAPIS_REPO = $(HOME)/git-repos/googleapis - -TESTS_REPO = $(HOME)/git-repos/gcp/google-cloud-common - -TMPDIR = /tmp/python-fs-proto -TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1beta1/proto - -.PHONY: sync-protos gen-protos - -gen-protos: sync-protos tweak-protos - # TODO(jba): Put the generated proto somewhere more suitable. 
- $(PROTOC) --python_out=google/cloud/firestore_v1beta1/proto \ - -I $(TMPDIR) \ - -I $(PROTOBUF_REPO)/src \ - -I $(GOOGLEAPIS_REPO) \ - $(TMPDIR)/*.proto - -tweak-protos: - mkdir -p $(TMPDIR_FS) - cp $(GOOGLEAPIS_REPO)/google/firestore/v1beta1/*.proto $(TMPDIR_FS) - sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR_FS)/*.proto - cp $(TESTS_REPO)/testing/firestore/proto/*.proto $(TMPDIR) - sed -i -e 's@google/firestore/v1beta1@google/cloud/firestore_v1beta1/proto@' $(TMPDIR)/*.proto - -sync-protos: - cd $(PROTOBUF_REPO); git pull - cd $(GOOGLEAPIS_REPO); git pull - cd $(TESTS_REPO); git pull diff --git a/firestore/Makefile_v1 b/firestore/Makefile_v1 new file mode 100644 index 000000000000..5c53a900461d --- /dev/null +++ b/firestore/Makefile_v1 @@ -0,0 +1,37 @@ +# This makefile builds the protos needed for cross-language Firestore tests. + +# Assume protoc is on the path. The proto compiler must be one that +# supports proto3 syntax. +PROTOC = protoc + +# Dependent repos. +REPO_DIR=$(HOME)/git-repos +PROTOBUF_REPO = $(REPO_DIR)/protobuf +GOOGLEAPIS_REPO = $(REPO_DIR)/googleapis +TESTS_REPO = $(REPO_DIR)/gcp/google-cloud-common + +TMPDIR = /tmp/python-fs-proto +TMPDIR_FS = $(TMPDIR)/google/cloud/firestore_v1/proto + +.PHONY: sync-protos gen-protos + +gen-protos: sync-protos tweak-protos + # TODO(jba): Put the generated proto somewhere more suitable. + $(PROTOC) --python_out=google/cloud/firestore_v1/proto \ + -I $(TMPDIR) \ + -I $(PROTOBUF_REPO)/src \ + -I $(GOOGLEAPIS_REPO) \ + $(TMPDIR)/test_v1.proto + +tweak-protos: + mkdir -p $(TMPDIR_FS) + cp $(GOOGLEAPIS_REPO)/google/firestore/v1/*.proto $(TMPDIR_FS) + sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/proto@' $(TMPDIR_FS)/*.proto + cp $(TESTS_REPO)/testing/firestore/proto/test_v1.proto $(TMPDIR) + sed -i -e 's@package tests@package tests.v1@' $(TMPDIR)/test_v1.proto + sed -i -e 's@google/firestore/v1@google/cloud/firestore_v1/proto@' $(TMPDIR)/test_v1.proto + +sync-protos: + cd $(PROTOBUF_REPO); git pull + cd $(GOOGLEAPIS_REPO); git pull + #cd $(TESTS_REPO); git pull diff --git a/firestore/google/cloud/firestore_v1/__init__.py b/firestore/google/cloud/firestore_v1/__init__.py new file mode 100644 index 000000000000..360d9a2fcb26 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/__init__.py @@ -0,0 +1,65 @@ +# Copyright 2019 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Python idiomatic client for Google Cloud Firestore.""" + +from pkg_resources import get_distribution + +__version__ = get_distribution("google-cloud-firestore").version + +from google.cloud.firestore_v1 import types +from google.cloud.firestore_v1._helpers import GeoPoint +from google.cloud.firestore_v1._helpers import ExistsOption +from google.cloud.firestore_v1._helpers import LastUpdateOption +from google.cloud.firestore_v1._helpers import ReadAfterWriteError +from google.cloud.firestore_v1._helpers import WriteOption +from google.cloud.firestore_v1.batch import WriteBatch +from google.cloud.firestore_v1.client import Client +from google.cloud.firestore_v1.collection import CollectionReference +from google.cloud.firestore_v1.transforms import ArrayRemove +from google.cloud.firestore_v1.transforms import ArrayUnion +from google.cloud.firestore_v1.transforms import DELETE_FIELD +from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP +from google.cloud.firestore_v1.document import DocumentReference +from google.cloud.firestore_v1.document import DocumentSnapshot +from google.cloud.firestore_v1.gapic import enums +from google.cloud.firestore_v1.query import Query +from google.cloud.firestore_v1.transaction import Transaction +from google.cloud.firestore_v1.transaction import transactional +from google.cloud.firestore_v1.watch import Watch + + +__all__ = [ + "__version__", + "ArrayRemove", + "ArrayUnion", + "Client", + "CollectionReference", + "DELETE_FIELD", + "DocumentReference", + "DocumentSnapshot", + "enums", + "ExistsOption", + "GeoPoint", + "LastUpdateOption", + "Query", + "ReadAfterWriteError", + "SERVER_TIMESTAMP", + "Transaction", + "transactional", + "types", + "Watch", + "WriteBatch", + "WriteOption", +] diff --git a/firestore/google/cloud/firestore_v1/_helpers.py b/firestore/google/cloud/firestore_v1/_helpers.py new file mode 100644 index 000000000000..d183dddff902 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/_helpers.py @@ -0,0 +1,1002 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Common helpers shared across Google Cloud Firestore modules.""" + +import datetime + +from google.protobuf import struct_pb2 +from google.type import latlng_pb2 +import grpc +import six + +from google.cloud import exceptions +from google.cloud._helpers import _datetime_to_pb_timestamp +from google.api_core.datetime_helpers import DatetimeWithNanoseconds +from google.cloud.firestore_v1 import transforms +from google.cloud.firestore_v1 import types +from google.cloud.firestore_v1.field_path import FieldPath +from google.cloud.firestore_v1.field_path import parse_field_path +from google.cloud.firestore_v1.gapic import enums +from google.cloud.firestore_v1.proto import common_pb2 +from google.cloud.firestore_v1.proto import document_pb2 +from google.cloud.firestore_v1.proto import write_pb2 + + +BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}." 
+DOCUMENT_PATH_DELIMITER = "/" +INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests." +READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction." +BAD_REFERENCE_ERROR = ( + "Reference value {!r} in unexpected format, expected to be of the form " + "``projects/{{project}}/databases/{{database}}/" + "documents/{{document_path}}``." +) +WRONG_APP_REFERENCE = ( + "Document {!r} does not correspond to the same database " "({!r}) as the client." +) +REQUEST_TIME_ENUM = enums.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME +_GRPC_ERROR_MAPPING = { + grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict, + grpc.StatusCode.NOT_FOUND: exceptions.NotFound, +} + + +class GeoPoint(object): + """Simple container for a geo point value. + + Args: + latitude (float): Latitude of a point. + longitude (float): Longitude of a point. + """ + + def __init__(self, latitude, longitude): + self.latitude = latitude + self.longitude = longitude + + def to_protobuf(self): + """Convert the current object to protobuf. + + Returns: + google.type.latlng_pb2.LatLng: The current point as a protobuf. + """ + return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude) + + def __eq__(self, other): + """Compare two geo points for equality. + + Returns: + Union[bool, NotImplemented]: :data:`True` if the points compare + equal, else :data:`False`. (Or :data:`NotImplemented` if + ``other`` is not a geo point.) + """ + if not isinstance(other, GeoPoint): + return NotImplemented + + return self.latitude == other.latitude and self.longitude == other.longitude + + def __ne__(self, other): + """Compare two geo points for inequality. + + Returns: + Union[bool, NotImplemented]: :data:`False` if the points compare + equal, else :data:`True`. (Or :data:`NotImplemented` if + ``other`` is not a geo point.) + """ + equality_val = self.__eq__(other) + if equality_val is NotImplemented: + return NotImplemented + else: + return not equality_val + + +def verify_path(path, is_collection): + """Verifies that a ``path`` has the correct form. + + Checks that all of the elements in ``path`` are strings. + + Args: + path (Tuple[str, ...]): The components in a collection or + document path. + is_collection (bool): Indicates if the ``path`` represents + a document or a collection. + + Raises: + ValueError: if + + * the ``path`` is empty + * ``is_collection=True`` and there are an even number of elements + * ``is_collection=False`` and there are an odd number of elements + * an element is not a string + """ + num_elements = len(path) + if num_elements == 0: + raise ValueError("Document or collection path cannot be empty") + + if is_collection: + if num_elements % 2 == 0: + raise ValueError("A collection must have an odd number of path elements") + else: + if num_elements % 2 == 1: + raise ValueError("A document must have an even number of path elements") + + for element in path: + if not isinstance(element, six.string_types): + msg = BAD_PATH_TEMPLATE.format(element, type(element)) + raise ValueError(msg) + + +def encode_value(value): + """Converts a native Python value into a Firestore protobuf ``Value``. + + Args: + value (Union[NoneType, bool, int, float, datetime.datetime, \ + str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native + Python value to convert to a protobuf field. + + Returns: + ~google.cloud.firestore_v1.types.Value: A + value encoded as a Firestore protobuf. + + Raises: + TypeError: If the ``value`` is not one of the accepted types. 
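To make the scalar and container mappings performed by ``encode_value`` concrete, a short sketch (annotation, not part of the patch; each trailing comment names the proto field that ends up populated):

from google.cloud.firestore_v1._helpers import encode_value

encode_value(None).WhichOneof("value_type")                 # "null_value"
encode_value(True).boolean_value                            # True (checked before integers)
encode_value(42).integer_value                              # 42
encode_value(3.5).double_value                              # 3.5
encode_value(u"hi").string_value                            # "hi"
encode_value([1, 2]).array_value.values[1].integer_value    # 2
encode_value({"a": 1}).map_value.fields["a"].integer_value  # 1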
+ """ + if value is None: + return document_pb2.Value(null_value=struct_pb2.NULL_VALUE) + + # Must come before six.integer_types since ``bool`` is an integer subtype. + if isinstance(value, bool): + return document_pb2.Value(boolean_value=value) + + if isinstance(value, six.integer_types): + return document_pb2.Value(integer_value=value) + + if isinstance(value, float): + return document_pb2.Value(double_value=value) + + if isinstance(value, DatetimeWithNanoseconds): + return document_pb2.Value(timestamp_value=value.timestamp_pb()) + + if isinstance(value, datetime.datetime): + return document_pb2.Value(timestamp_value=_datetime_to_pb_timestamp(value)) + + if isinstance(value, six.text_type): + return document_pb2.Value(string_value=value) + + if isinstance(value, six.binary_type): + return document_pb2.Value(bytes_value=value) + + # NOTE: We avoid doing an isinstance() check for a Document + # here to avoid import cycles. + document_path = getattr(value, "_document_path", None) + if document_path is not None: + return document_pb2.Value(reference_value=document_path) + + if isinstance(value, GeoPoint): + return document_pb2.Value(geo_point_value=value.to_protobuf()) + + if isinstance(value, list): + value_list = [encode_value(element) for element in value] + value_pb = document_pb2.ArrayValue(values=value_list) + return document_pb2.Value(array_value=value_pb) + + if isinstance(value, dict): + value_dict = encode_dict(value) + value_pb = document_pb2.MapValue(fields=value_dict) + return document_pb2.Value(map_value=value_pb) + + raise TypeError( + "Cannot convert to a Firestore Value", value, "Invalid type", type(value) + ) + + +def encode_dict(values_dict): + """Encode a dictionary into protobuf ``Value``-s. + + Args: + values_dict (dict): The dictionary to encode as protobuf fields. + + Returns: + Dict[str, ~google.cloud.firestore_v1.types.Value]: A + dictionary of string keys and ``Value`` protobufs as dictionary + values. + """ + return {key: encode_value(value) for key, value in six.iteritems(values_dict)} + + +def reference_value_to_document(reference_value, client): + """Convert a reference value string to a document. + + Args: + reference_value (str): A document reference value. + client (~.firestore_v1.client.Client): A client that has + a document factory. + + Returns: + ~.firestore_v1.document.DocumentReference: The document + corresponding to ``reference_value``. + + Raises: + ValueError: If the ``reference_value`` is not of the expected + format: ``projects/{project}/databases/{database}/documents/...``. + ValueError: If the ``reference_value`` does not come from the same + project / database combination as the ``client``. + """ + # The first 5 parts are + # projects, {project}, databases, {database}, documents + parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5) + if len(parts) != 6: + msg = BAD_REFERENCE_ERROR.format(reference_value) + raise ValueError(msg) + + # The sixth part is `a/b/c/d` (i.e. the document path) + document = client.document(parts[-1]) + if document._document_path != reference_value: + msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string) + raise ValueError(msg) + + return document + + +def decode_value(value, client): + """Converts a Firestore protobuf ``Value`` to a native Python value. + + Args: + value (google.cloud.firestore_v1.types.Value): A + Firestore protobuf to be decoded / parsed / converted. + client (~.firestore_v1.client.Client): A client that has + a document factory. 
+ + Returns: + Union[NoneType, bool, int, float, datetime.datetime, \ + str, bytes, dict, ~google.cloud.Firestore.GeoPoint]: A native + Python value converted from the ``value``. + + Raises: + NotImplementedError: If the ``value_type`` is ``reference_value``. + ValueError: If the ``value_type`` is unknown. + """ + value_type = value.WhichOneof("value_type") + + if value_type == "null_value": + return None + elif value_type == "boolean_value": + return value.boolean_value + elif value_type == "integer_value": + return value.integer_value + elif value_type == "double_value": + return value.double_value + elif value_type == "timestamp_value": + return DatetimeWithNanoseconds.from_timestamp_pb(value.timestamp_value) + elif value_type == "string_value": + return value.string_value + elif value_type == "bytes_value": + return value.bytes_value + elif value_type == "reference_value": + return reference_value_to_document(value.reference_value, client) + elif value_type == "geo_point_value": + return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude) + elif value_type == "array_value": + return [decode_value(element, client) for element in value.array_value.values] + elif value_type == "map_value": + return decode_dict(value.map_value.fields, client) + else: + raise ValueError("Unknown ``value_type``", value_type) + + +def decode_dict(value_fields, client): + """Converts a protobuf map of Firestore ``Value``-s. + + Args: + value_fields (google.protobuf.pyext._message.MessageMapContainer): A + protobuf map of Firestore ``Value``-s. + client (~.firestore_v1.client.Client): A client that has + a document factory. + + Returns: + Dict[str, Union[NoneType, bool, int, float, datetime.datetime, \ + str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary + of native Python values converted from the ``value_fields``. + """ + return { + key: decode_value(value, client) for key, value in six.iteritems(value_fields) + } + + +def get_doc_id(document_pb, expected_prefix): + """Parse a document ID from a document protobuf. + + Args: + document_pb (google.cloud.proto.firestore.v1.\ + document_pb2.Document): A protobuf for a document that + was created in a ``CreateDocument`` RPC. + expected_prefix (str): The expected collection prefix for the + fully-qualified document name. + + Returns: + str: The document ID from the protobuf. + + Raises: + ValueError: If the name does not begin with the prefix. 
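A quick sketch of the name parsing implemented just below (the document name and prefix are hypothetical):

from google.cloud.firestore_v1._helpers import get_doc_id
from google.cloud.firestore_v1.proto import document_pb2

prefix = "projects/my-project/databases/(default)/documents/users"   # hypothetical
doc_pb = document_pb2.Document(name=prefix + "/alice")
get_doc_id(doc_pb, prefix)                                   # -> "alice"
get_doc_id(doc_pb, prefix.replace("users", "rooms"))         # raises ValueError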
+ """ + prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1) + if prefix != expected_prefix: + raise ValueError( + "Unexpected document name", + document_pb.name, + "Expected to begin with", + expected_prefix, + ) + + return document_id + + +_EmptyDict = transforms.Sentinel("Marker for an empty dict value") + + +def extract_fields(document_data, prefix_path, expand_dots=False): + """Do depth-first walk of tree, yielding field_path, value""" + if not document_data: + yield prefix_path, _EmptyDict + else: + for key, value in sorted(six.iteritems(document_data)): + + if expand_dots: + sub_key = FieldPath.from_string(key) + else: + sub_key = FieldPath(key) + + field_path = FieldPath(*(prefix_path.parts + sub_key.parts)) + + if isinstance(value, dict): + for s_path, s_value in extract_fields(value, field_path): + yield s_path, s_value + else: + yield field_path, value + + +def set_field_value(document_data, field_path, value): + """Set a value into a document for a field_path""" + current = document_data + for element in field_path.parts[:-1]: + current = current.setdefault(element, {}) + if value is _EmptyDict: + value = {} + current[field_path.parts[-1]] = value + + +def get_field_value(document_data, field_path): + if not field_path.parts: + raise ValueError("Empty path") + + current = document_data + for element in field_path.parts[:-1]: + current = current[element] + return current[field_path.parts[-1]] + + +class DocumentExtractor(object): + """ Break document data up into actual data and transforms. + + Handle special values such as ``DELETE_FIELD``, ``SERVER_TIMESTAMP``. + + Args: + document_data (dict): + Property names and values to use for sending a change to + a document. + """ + + def __init__(self, document_data): + self.document_data = document_data + self.field_paths = [] + self.deleted_fields = [] + self.server_timestamps = [] + self.array_removes = {} + self.array_unions = {} + self.set_fields = {} + self.empty_document = False + + prefix_path = FieldPath() + iterator = self._get_document_iterator(prefix_path) + + for field_path, value in iterator: + + if field_path == prefix_path and value is _EmptyDict: + self.empty_document = True + + elif value is transforms.DELETE_FIELD: + self.deleted_fields.append(field_path) + + elif value is transforms.SERVER_TIMESTAMP: + self.server_timestamps.append(field_path) + + elif isinstance(value, transforms.ArrayRemove): + self.array_removes[field_path] = value.values + + elif isinstance(value, transforms.ArrayUnion): + self.array_unions[field_path] = value.values + + else: + self.field_paths.append(field_path) + set_field_value(self.set_fields, field_path, value) + + def _get_document_iterator(self, prefix_path): + return extract_fields(self.document_data, prefix_path) + + @property + def has_transforms(self): + return bool(self.server_timestamps or self.array_removes or self.array_unions) + + @property + def transform_paths(self): + return sorted( + self.server_timestamps + list(self.array_removes) + list(self.array_unions) + ) + + def _get_update_mask(self, allow_empty_mask=False): + return None + + def get_update_pb(self, document_path, exists=None, allow_empty_mask=False): + + if exists is not None: + current_document = common_pb2.Precondition(exists=exists) + else: + current_document = None + + update_pb = write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields=encode_dict(self.set_fields) + ), + update_mask=self._get_update_mask(allow_empty_mask), + current_document=current_document, + ) + + 
return update_pb + + def get_transform_pb(self, document_path, exists=None): + def make_array_value(values): + value_list = [encode_value(element) for element in values] + return document_pb2.ArrayValue(values=value_list) + + path_field_transforms = ( + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + set_to_server_value=REQUEST_TIME_ENUM, + ), + ) + for path in self.server_timestamps + ] + + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + remove_all_from_array=make_array_value(values), + ), + ) + for path, values in self.array_removes.items() + ] + + [ + ( + path, + write_pb2.DocumentTransform.FieldTransform( + field_path=path.to_api_repr(), + append_missing_elements=make_array_value(values), + ), + ) + for path, values in self.array_unions.items() + ] + ) + field_transforms = [ + transform for path, transform in sorted(path_field_transforms) + ] + transform_pb = write_pb2.Write( + transform=write_pb2.DocumentTransform( + document=document_path, field_transforms=field_transforms + ) + ) + if exists is not None: + transform_pb.current_document.CopyFrom( + common_pb2.Precondition(exists=exists) + ) + + return transform_pb + + +def pbs_for_create(document_path, document_data): + """Make ``Write`` protobufs for ``create()`` methods. + + Args: + document_path (str): A fully-qualified document path. + document_data (dict): Property names and values to use for + creating a document. + + Returns: + List[google.cloud.firestore_v1.types.Write]: One or two + ``Write`` protobuf instances for ``create()``. + """ + extractor = DocumentExtractor(document_data) + + if extractor.deleted_fields: + raise ValueError("Cannot apply DELETE_FIELD in a create request.") + + write_pbs = [] + + # Conformance tests require skipping the 'update_pb' if the document + # contains only transforms. + if extractor.empty_document or extractor.set_fields: + write_pbs.append(extractor.get_update_pb(document_path, exists=False)) + + if extractor.has_transforms: + exists = None if write_pbs else False + transform_pb = extractor.get_transform_pb(document_path, exists) + write_pbs.append(transform_pb) + + return write_pbs + + +def pbs_for_set_no_merge(document_path, document_data): + """Make ``Write`` protobufs for ``set()`` methods. + + Args: + document_path (str): A fully-qualified document path. + document_data (dict): Property names and values to use for + replacing a document. + + Returns: + List[google.cloud.firestore_v1.types.Write]: One + or two ``Write`` protobuf instances for ``set()``. + """ + extractor = DocumentExtractor(document_data) + + if extractor.deleted_fields: + raise ValueError( + "Cannot apply DELETE_FIELD in a set request without " + "specifying 'merge=True' or 'merge=[field_paths]'." + ) + + # Conformance tests require send the 'update_pb' even if the document + # contains only transforms. + write_pbs = [extractor.get_update_pb(document_path)] + + if extractor.has_transforms: + transform_pb = extractor.get_transform_pb(document_path) + write_pbs.append(transform_pb) + + return write_pbs + + +class DocumentExtractorForMerge(DocumentExtractor): + """ Break document data up into actual data and transforms. 
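To make the one-versus-two ``Write`` split above concrete, a hedged sketch of what ``pbs_for_create`` produces (the document path is hypothetical; the trailing comments show the expected values):

from google.cloud.firestore_v1 import _helpers, transforms

doc_path = "projects/my-project/databases/(default)/documents/users/alice"  # hypothetical

# Plain data: one update Write guarded by an exists=False precondition.
pbs = _helpers.pbs_for_create(doc_path, {"name": "Alice"})
len(pbs)                                            # 1
pbs[0].current_document.exists                      # False

# Data plus a transform: the update Write is followed by a transform Write.
pbs = _helpers.pbs_for_create(
    doc_path, {"name": "Alice", "at": transforms.SERVER_TIMESTAMP}
)
len(pbs)                                            # 2
pbs[1].transform.field_transforms[0].field_path     # "at"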
+ """ + + def __init__(self, document_data): + super(DocumentExtractorForMerge, self).__init__(document_data) + self.data_merge = [] + self.transform_merge = [] + self.merge = [] + + @property + def has_updates(self): + # for whatever reason, the conformance tests want to see the parent + # of nested transform paths in the update mask + # (see set-st-merge-nonleaf-alone.textproto) + update_paths = set(self.data_merge) + + for transform_path in self.transform_paths: + if len(transform_path.parts) > 1: + parent_fp = FieldPath(*transform_path.parts[:-1]) + update_paths.add(parent_fp) + + return bool(update_paths) + + def _apply_merge_all(self): + self.data_merge = sorted(self.field_paths + self.deleted_fields) + # TODO: other transforms + self.transform_merge = self.transform_paths + self.merge = sorted(self.data_merge + self.transform_paths) + + def _construct_merge_paths(self, merge): + for merge_field in merge: + if isinstance(merge_field, FieldPath): + yield merge_field + else: + yield FieldPath(*parse_field_path(merge_field)) + + def _normalize_merge_paths(self, merge): + merge_paths = sorted(self._construct_merge_paths(merge)) + + # Raise if any merge path is a parent of another. Leverage sorting + # to avoid quadratic behavior. + for index in range(len(merge_paths) - 1): + lhs, rhs = merge_paths[index], merge_paths[index + 1] + if lhs.eq_or_parent(rhs): + raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs)) + + for merge_path in merge_paths: + if merge_path in self.deleted_fields: + continue + try: + get_field_value(self.document_data, merge_path) + except KeyError: + raise ValueError("Invalid merge path: {}".format(merge_path)) + + return merge_paths + + def _apply_merge_paths(self, merge): + + if self.empty_document: + raise ValueError("Cannot merge specific fields with empty document.") + + merge_paths = self._normalize_merge_paths(merge) + + del self.data_merge[:] + del self.transform_merge[:] + self.merge = merge_paths + + for merge_path in merge_paths: + + if merge_path in self.transform_paths: + self.transform_merge.append(merge_path) + + for field_path in self.field_paths: + if merge_path.eq_or_parent(field_path): + self.data_merge.append(field_path) + + # Clear out data for fields not merged. + merged_set_fields = {} + for field_path in self.data_merge: + value = get_field_value(self.document_data, field_path) + set_field_value(merged_set_fields, field_path, value) + self.set_fields = merged_set_fields + + unmerged_deleted_fields = [ + field_path + for field_path in self.deleted_fields + if field_path not in self.merge + ] + if unmerged_deleted_fields: + raise ValueError( + "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields) + ) + self.data_merge = sorted(self.data_merge + self.deleted_fields) + + # Keep only transforms which are within merge. 
+ merged_transform_paths = set() + for merge_path in self.merge: + tranform_merge_paths = [ + transform_path + for transform_path in self.transform_paths + if merge_path.eq_or_parent(transform_path) + ] + merged_transform_paths.update(tranform_merge_paths) + + self.server_timestamps = [ + path for path in self.server_timestamps if path in merged_transform_paths + ] + + self.array_removes = { + path: values + for path, values in self.array_removes.items() + if path in merged_transform_paths + } + + self.array_unions = { + path: values + for path, values in self.array_unions.items() + if path in merged_transform_paths + } + + def apply_merge(self, merge): + if merge is True: # merge all fields + self._apply_merge_all() + else: + self._apply_merge_paths(merge) + + def _get_update_mask(self, allow_empty_mask=False): + # Mask uses dotted / quoted paths. + mask_paths = [ + field_path.to_api_repr() + for field_path in self.merge + if field_path not in self.transform_merge + ] + + if mask_paths or allow_empty_mask: + return common_pb2.DocumentMask(field_paths=mask_paths) + + +def pbs_for_set_with_merge(document_path, document_data, merge): + """Make ``Write`` protobufs for ``set()`` methods. + + Args: + document_path (str): A fully-qualified document path. + document_data (dict): Property names and values to use for + replacing a document. + merge (Optional[bool] or Optional[List]): + If True, merge all fields; else, merge only the named fields. + + Returns: + List[google.cloud.firestore_v1.types.Write]: One + or two ``Write`` protobuf instances for ``set()``. + """ + extractor = DocumentExtractorForMerge(document_data) + extractor.apply_merge(merge) + + merge_empty = not document_data + + write_pbs = [] + + if extractor.has_updates or merge_empty: + write_pbs.append( + extractor.get_update_pb(document_path, allow_empty_mask=merge_empty) + ) + + if extractor.transform_paths: + transform_pb = extractor.get_transform_pb(document_path) + write_pbs.append(transform_pb) + + return write_pbs + + +class DocumentExtractorForUpdate(DocumentExtractor): + """ Break document data up into actual data and transforms. + """ + + def __init__(self, document_data): + super(DocumentExtractorForUpdate, self).__init__(document_data) + self.top_level_paths = sorted( + [FieldPath.from_string(key) for key in document_data] + ) + tops = set(self.top_level_paths) + for top_level_path in self.top_level_paths: + for ancestor in top_level_path.lineage(): + if ancestor in tops: + raise ValueError( + "Conflicting field path: {}, {}".format( + top_level_path, ancestor + ) + ) + + for field_path in self.deleted_fields: + if field_path not in tops: + raise ValueError( + "Cannot update with nest delete: {}".format(field_path) + ) + + def _get_document_iterator(self, prefix_path): + return extract_fields(self.document_data, prefix_path, expand_dots=True) + + def _get_update_mask(self, allow_empty_mask=False): + mask_paths = [] + for field_path in self.top_level_paths: + if field_path not in self.transform_paths: + mask_paths.append(field_path.to_api_repr()) + else: + prefix = FieldPath(*field_path.parts[:-1]) + if prefix.parts: + mask_paths.append(prefix.to_api_repr()) + + return common_pb2.DocumentMask(field_paths=mask_paths) + + +def pbs_for_update(document_path, field_updates, option): + """Make ``Write`` protobufs for ``update()`` methods. + + Args: + document_path (str): A fully-qualified document path. + field_updates (dict): Field names or paths to update and values + to update with. 
+ option (optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. + + Returns: + List[google.cloud.firestore_v1.types.Write]: One + or two ``Write`` protobuf instances for ``update()``. + """ + extractor = DocumentExtractorForUpdate(field_updates) + + if extractor.empty_document: + raise ValueError("Cannot update with an empty document.") + + if option is None: # Default is to use ``exists=True``. + option = ExistsOption(exists=True) + + write_pbs = [] + + if extractor.field_paths or extractor.deleted_fields: + update_pb = extractor.get_update_pb(document_path) + option.modify_write(update_pb) + write_pbs.append(update_pb) + + if extractor.has_transforms: + transform_pb = extractor.get_transform_pb(document_path) + if not write_pbs: + # NOTE: set the write option on the ``transform_pb`` only if there + # is no ``update_pb`` + option.modify_write(transform_pb) + write_pbs.append(transform_pb) + + return write_pbs + + +def pb_for_delete(document_path, option): + """Make a ``Write`` protobuf for ``delete()`` methods. + + Args: + document_path (str): A fully-qualified document path. + option (optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. + + Returns: + google.cloud.firestore_v1.types.Write: A + ``Write`` protobuf instance for the ``delete()``. + """ + write_pb = write_pb2.Write(delete=document_path) + if option is not None: + option.modify_write(write_pb) + + return write_pb + + +class ReadAfterWriteError(Exception): + """Raised when a read is attempted after a write. + + Raised by "read" methods that use transactions. + """ + + +def get_transaction_id(transaction, read_operation=True): + """Get the transaction ID from a ``Transaction`` object. + + Args: + transaction (Optional[~.firestore_v1.transaction.\ + Transaction]): An existing transaction that this query will + run in. + read_operation (Optional[bool]): Indicates if the transaction ID + will be used in a read operation. Defaults to :data:`True`. + + Returns: + Optional[bytes]: The ID of the transaction, or :data:`None` if the + ``transaction`` is :data:`None`. + + Raises: + ValueError: If the ``transaction`` is not in progress (only if + ``transaction`` is not :data:`None`). + ReadAfterWriteError: If the ``transaction`` has writes stored on + it and ``read_operation`` is :data:`True`. + """ + if transaction is None: + return None + else: + if not transaction.in_progress: + raise ValueError(INACTIVE_TXN) + if read_operation and len(transaction._write_pbs) > 0: + raise ReadAfterWriteError(READ_AFTER_WRITE_ERROR) + return transaction.id + + +def metadata_with_prefix(prefix, **kw): + """Create RPC metadata containing a prefix. + + Args: + prefix (str): appropriate resource path. + + Returns: + List[Tuple[str, str]]: RPC metadata with supplied prefix + """ + return [("google-cloud-resource-prefix", prefix)] + + +class WriteOption(object): + """Option used to assert a condition on a write operation.""" + + def modify_write(self, write_pb, no_create_msg=None): + """Modify a ``Write`` protobuf based on the state of this write option. + + This is a virtual method intended to be implemented by subclasses. + + Args: + write_pb (google.cloud.firestore_v1.types.Write): A + ``Write`` protobuf instance to be modified with a precondition + determined by the state of this option. 
+ no_create_msg (Optional[str]): A message to use to indicate that + a create operation is not allowed. + + Raises: + NotImplementedError: Always, this method is virtual. + """ + raise NotImplementedError + + +class LastUpdateOption(WriteOption): + """Option used to assert a "last update" condition on a write operation. + + This will typically be created by + :meth:`~.firestore_v1.client.Client.write_option`. + + Args: + last_update_time (google.protobuf.timestamp_pb2.Timestamp): A + timestamp. When set, the target document must exist and have + been last updated at that time. Protobuf ``update_time`` timestamps + are typically returned from methods that perform write operations + as part of a "write result" protobuf or directly. + """ + + def __init__(self, last_update_time): + self._last_update_time = last_update_time + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._last_update_time == other._last_update_time + + def modify_write(self, write_pb, **unused_kwargs): + """Modify a ``Write`` protobuf based on the state of this write option. + + The ``last_update_time`` is added to ``write_pb`` as an "update time" + precondition. When set, the target document must exist and have been + last updated at that time. + + Args: + write_pb (google.cloud.firestore_v1.types.Write): A + ``Write`` protobuf instance to be modified with a precondition + determined by the state of this option. + unused_kwargs (Dict[str, Any]): Keyword arguments accepted by + other subclasses that are unused here. + """ + current_doc = types.Precondition(update_time=self._last_update_time) + write_pb.current_document.CopyFrom(current_doc) + + +class ExistsOption(WriteOption): + """Option used to assert existence on a write operation. + + This will typically be created by + :meth:`~.firestore_v1.client.Client.write_option`. + + Args: + exists (bool): Indicates if the document being modified + should already exist. + """ + + def __init__(self, exists): + self._exists = exists + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._exists == other._exists + + def modify_write(self, write_pb, **unused_kwargs): + """Modify a ``Write`` protobuf based on the state of this write option. + + If: + + * ``exists=True``, adds a precondition that requires existence + * ``exists=False``, adds a precondition that requires non-existence + + Args: + write_pb (google.cloud.firestore_v1.types.Write): A + ``Write`` protobuf instance to be modified with a precondition + determined by the state of this option. + unused_kwargs (Dict[str, Any]): Keyword arguments accepted by + other subclasses that are unused here. + """ + current_doc = types.Precondition(exists=self._exists) + write_pb.current_document.CopyFrom(current_doc) diff --git a/firestore/google/cloud/firestore_v1/batch.py b/firestore/google/cloud/firestore_v1/batch.py new file mode 100644 index 000000000000..1bcbe22aa8b7 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/batch.py @@ -0,0 +1,161 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
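Before moving on to the batch module, a sketch tying the update helpers to the write options defined above (the document path is hypothetical; the trailing comments show what the resulting protos contain):

from google.cloud.firestore_v1 import _helpers

doc_path = "projects/my-project/databases/(default)/documents/users/alice"  # hypothetical

# Dotted keys are expanded into nested fields, the update mask keeps the
# dotted form, and the default precondition is exists=True.
write_pbs = _helpers.pbs_for_update(doc_path, {"profile.age": 30}, option=None)
write_pbs[0].update_mask.field_paths                                          # ["profile.age"]
write_pbs[0].update.fields["profile"].map_value.fields["age"].integer_value   # 30
write_pbs[0].current_document.exists                                          # True

# An explicit option replaces that default precondition.
opt = _helpers.ExistsOption(exists=False)
write_pbs = _helpers.pbs_for_update(doc_path, {"profile.age": 30}, option=opt)
write_pbs[0].current_document.exists                                          # False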
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for batch requests to the Google Cloud Firestore API.""" + + +from google.cloud.firestore_v1 import _helpers + + +class WriteBatch(object): + """Accumulate write operations to be sent in a batch. + + This has the same set of methods for write operations that + :class:`~.firestore_v1.document.DocumentReference` does, + e.g. :meth:`~.firestore_v1.document.DocumentReference.create`. + + Args: + client (~.firestore_v1.client.Client): The client that + created this batch. + """ + + def __init__(self, client): + self._client = client + self._write_pbs = [] + self.write_results = None + self.commit_time = None + + def _add_write_pbs(self, write_pbs): + """Add `Write`` protobufs to this transaction. + + This method intended to be over-ridden by subclasses. + + Args: + write_pbs (List[google.cloud.proto.firestore.v1.\ + write_pb2.Write]): A list of write protobufs to be added. + """ + self._write_pbs.extend(write_pbs) + + def create(self, reference, document_data): + """Add a "change" to this batch to create a document. + + If the document given by ``reference`` already exists, then this + batch will fail when :meth:`commit`-ed. + + Args: + reference (~.firestore_v1.document.DocumentReference): A + document reference to be created in this batch. + document_data (dict): Property names and values to use for + creating a document. + """ + write_pbs = _helpers.pbs_for_create(reference._document_path, document_data) + self._add_write_pbs(write_pbs) + + def set(self, reference, document_data, merge=False): + """Add a "change" to replace a document. + + See + :meth:`~.firestore_v1.document.DocumentReference.set` for + more information on how ``option`` determines how the change is + applied. + + Args: + reference (~.firestore_v1.document.DocumentReference): + A document reference that will have values set in this batch. + document_data (dict): + Property names and values to use for replacing a document. + merge (Optional[bool] or Optional[List]): + If True, apply merging instead of overwriting the state + of the document. + """ + if merge is not False: + write_pbs = _helpers.pbs_for_set_with_merge( + reference._document_path, document_data, merge + ) + else: + write_pbs = _helpers.pbs_for_set_no_merge( + reference._document_path, document_data + ) + + self._add_write_pbs(write_pbs) + + def update(self, reference, field_updates, option=None): + """Add a "change" to update a document. + + See + :meth:`~.firestore_v1.document.DocumentReference.update` for + more information on ``field_updates`` and ``option``. + + Args: + reference (~.firestore_v1.document.DocumentReference): A + document reference that will be deleted in this batch. + field_updates (dict): Field names or paths to update and values + to update with. + option (Optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. 
+ """ + if option.__class__.__name__ == "ExistsOption": + raise ValueError("you must not pass an explicit write option to " "update.") + write_pbs = _helpers.pbs_for_update( + reference._document_path, field_updates, option + ) + self._add_write_pbs(write_pbs) + + def delete(self, reference, option=None): + """Add a "change" to delete a document. + + See + :meth:`~.firestore_v1.document.DocumentReference.delete` for + more information on how ``option`` determines how the change is + applied. + + Args: + reference (~.firestore_v1.document.DocumentReference): A + document reference that will be deleted in this batch. + option (Optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. + """ + write_pb = _helpers.pb_for_delete(reference._document_path, option) + self._add_write_pbs([write_pb]) + + def commit(self): + """Commit the changes accumulated in this batch. + + Returns: + List[google.cloud.proto.firestore.v1.\ + write_pb2.WriteResult, ...]: The write results corresponding + to the changes committed, returned in the same order as the + changes were applied to this batch. A write result contains an + ``update_time`` field. + """ + commit_response = self._client._firestore_api.commit( + self._client._database_string, + self._write_pbs, + transaction=None, + metadata=self._client._rpc_metadata, + ) + + self._write_pbs = [] + self.write_results = results = list(commit_response.write_results) + self.commit_time = commit_response.commit_time + return results + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + self.commit() diff --git a/firestore/google/cloud/firestore_v1/client.py b/firestore/google/cloud/firestore_v1/client.py new file mode 100644 index 000000000000..8c7c3f660807 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/client.py @@ -0,0 +1,513 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud Firestore API. + +This is the base from which all interactions with the API occur. 
+ +In the hierarchy of API concepts + +* a :class:`~.firestore_v1.client.Client` owns a + :class:`~.firestore_v1.collection.CollectionReference` +* a :class:`~.firestore_v1.client.Client` owns a + :class:`~.firestore_v1.document.DocumentReference` +""" +from google.cloud.client import ClientWithProject + +from google.cloud.firestore_v1 import _helpers +from google.cloud.firestore_v1 import types +from google.cloud.firestore_v1.batch import WriteBatch +from google.cloud.firestore_v1.collection import CollectionReference +from google.cloud.firestore_v1.document import DocumentReference +from google.cloud.firestore_v1.document import DocumentSnapshot +from google.cloud.firestore_v1.field_path import render_field_path +from google.cloud.firestore_v1.gapic import firestore_client +from google.cloud.firestore_v1.transaction import Transaction + + +DEFAULT_DATABASE = "(default)" +"""str: The default database used in a :class:`~.firestore.client.Client`.""" +_BAD_OPTION_ERR = ( + "Exactly one of ``last_update_time`` or ``exists`` " "must be provided." +) +_BAD_DOC_TEMPLATE = ( + "Document {!r} appeared in response but was not present among references" +) +_ACTIVE_TXN = "There is already an active transaction." +_INACTIVE_TXN = "There is no active transaction." + + +class Client(ClientWithProject): + """Client for interacting with Google Cloud Firestore API. + + .. note:: + + Since the Cloud Firestore API requires the gRPC transport, no + ``_http`` argument is accepted by this class. + + Args: + project (Optional[str]): The project which the client acts on behalf + of. If not passed, falls back to the default inferred + from the environment. + credentials (Optional[~google.auth.credentials.Credentials]): The + OAuth2 Credentials to use for this client. If not passed, falls + back to the default inferred from the environment. + database (Optional[str]): The database name that the client targets. + For now, :attr:`DEFAULT_DATABASE` (the default value) is the + only valid database. + """ + + SCOPE = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ) + """The scopes required for authenticating with the Firestore service.""" + + _firestore_api_internal = None + _database_string_internal = None + _rpc_metadata_internal = None + + def __init__(self, project=None, credentials=None, database=DEFAULT_DATABASE): + # NOTE: This API has no use for the _http argument, but sending it + # will have no impact since the _http() @property only lazily + # creates a working HTTP object. + super(Client, self).__init__( + project=project, credentials=credentials, _http=None + ) + self._database = database + + @property + def _firestore_api(self): + """Lazy-loading getter GAPIC Firestore API. + + Returns: + ~.gapic.firestore.v1.firestore_client.FirestoreClient: The + GAPIC client with the credentials of the current client. + """ + if self._firestore_api_internal is None: + self._firestore_api_internal = firestore_client.FirestoreClient( + credentials=self._credentials + ) + + return self._firestore_api_internal + + @property + def _database_string(self): + """The database string corresponding to this client's project. + + This value is lazy-loaded and cached. + + Will be of the form + + ``projects/{project_id}/databases/{database_id}`` + + but ``database_id == '(default)'`` for the time being. + + Returns: + str: The fully-qualified database string for the current + project. (The default database is also in this string.) 
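Concretely, assuming application-default credentials and a hypothetical project ID:

from google.cloud import firestore_v1

client = firestore_v1.Client(project="my-project", database="(default)")
client._database_string   # "projects/my-project/databases/(default)"
client._rpc_metadata      # [("google-cloud-resource-prefix", client._database_string)]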
+ """ + if self._database_string_internal is None: + # NOTE: database_root_path() is a classmethod, so we don't use + # self._firestore_api (it isn't necessary). + db_str = firestore_client.FirestoreClient.database_root_path( + self.project, self._database + ) + self._database_string_internal = db_str + + return self._database_string_internal + + @property + def _rpc_metadata(self): + """The RPC metadata for this client's associated database. + + Returns: + Sequence[Tuple(str, str)]: RPC metadata with resource prefix + for the database associated with this client. + """ + if self._rpc_metadata_internal is None: + self._rpc_metadata_internal = _helpers.metadata_with_prefix( + self._database_string + ) + + return self._rpc_metadata_internal + + def collection(self, *collection_path): + """Get a reference to a collection. + + For a top-level collection: + + .. code-block:: python + + >>> client.collection('top') + + For a sub-collection: + + .. code-block:: python + + >>> client.collection('mydocs/doc/subcol') + >>> # is the same as + >>> client.collection('mydocs', 'doc', 'subcol') + + Sub-collections can be nested deeper in a similar fashion. + + Args: + collection_path (Tuple[str, ...]): Can either be + + * A single ``/``-delimited path to a collection + * A tuple of collection path segments + + Returns: + ~.firestore_v1.collection.CollectionReference: A reference + to a collection in the Firestore database. + """ + if len(collection_path) == 1: + path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER) + else: + path = collection_path + + return CollectionReference(*path, client=self) + + def document(self, *document_path): + """Get a reference to a document in a collection. + + For a top-level document: + + .. code-block:: python + + >>> client.document('collek/shun') + >>> # is the same as + >>> client.document('collek', 'shun') + + For a document in a sub-collection: + + .. code-block:: python + + >>> client.document('mydocs/doc/subcol/child') + >>> # is the same as + >>> client.document('mydocs', 'doc', 'subcol', 'child') + + Documents in sub-collections can be nested deeper in a similar fashion. + + Args: + document_path (Tuple[str, ...]): Can either be + + * A single ``/``-delimited path to a document + * A tuple of document path segments + + Returns: + ~.firestore_v1.document.DocumentReference: A reference + to a document in a collection. + """ + if len(document_path) == 1: + path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER) + else: + path = document_path + + return DocumentReference(*path, client=self) + + @staticmethod + def field_path(*field_names): + """Create a **field path** from a list of nested field names. + + A **field path** is a ``.``-delimited concatenation of the field + names. It is used to represent a nested field. For example, + in the data + + .. code-block:: python + + data = { + 'aa': { + 'bb': { + 'cc': 10, + }, + }, + } + + the field path ``'aa.bb.cc'`` represents the data stored in + ``data['aa']['bb']['cc']``. + + Args: + field_names (Tuple[str, ...]): The list of field names. + + Returns: + str: The ``.``-delimited field path. + """ + return render_field_path(field_names) + + @staticmethod + def write_option(**kwargs): + """Create a write option for write operations. + + Write operations include :meth:`~.DocumentReference.set`, + :meth:`~.DocumentReference.update` and + :meth:`~.DocumentReference.delete`. 
+ + One of the following keyword arguments must be provided: + + * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\ + Timestamp`): A timestamp. When set, the target document must + exist and have been last updated at that time. Protobuf + ``update_time`` timestamps are typically returned from methods + that perform write operations as part of a "write result" + protobuf or directly. + * ``exists`` (:class:`bool`): Indicates if the document being modified + should already exist. + + Providing no argument would make the option have no effect (so + it is not allowed). Providing multiple would be an apparent + contradiction, since ``last_update_time`` assumes that the + document **was** updated (it can't have been updated if it + doesn't exist) and ``exists`` indicate that it is unknown if the + document exists or not. + + Args: + kwargs (Dict[str, Any]): The keyword arguments described above. + + Raises: + TypeError: If anything other than exactly one argument is + provided by the caller. + """ + if len(kwargs) != 1: + raise TypeError(_BAD_OPTION_ERR) + + name, value = kwargs.popitem() + if name == "last_update_time": + return _helpers.LastUpdateOption(value) + elif name == "exists": + return _helpers.ExistsOption(value) + else: + extra = "{!r} was provided".format(name) + raise TypeError(_BAD_OPTION_ERR, extra) + + def get_all(self, references, field_paths=None, transaction=None): + """Retrieve a batch of documents. + + .. note:: + + Documents returned by this method are not guaranteed to be + returned in the same order that they are given in ``references``. + + .. note:: + + If multiple ``references`` refer to the same document, the server + will only return one result. + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + If a ``transaction`` is used and it already has write operations + added, this method cannot be used (i.e. read-after-write is not + allowed). + + Args: + references (List[.DocumentReference, ...]): Iterable of document + references to be retrieved. + field_paths (Optional[Iterable[str, ...]]): An iterable of field + paths (``.``-delimited list of field names) to use as a + projection of document fields in the returned results. If + no value is provided, all fields will be returned. + transaction (Optional[~.firestore_v1.transaction.\ + Transaction]): An existing transaction that these + ``references`` will be retrieved in. + + Yields: + .DocumentSnapshot: The next document snapshot that fulfills the + query, or :data:`None` if the document does not exist. + """ + document_paths, reference_map = _reference_info(references) + mask = _get_doc_mask(field_paths) + response_iterator = self._firestore_api.batch_get_documents( + self._database_string, + document_paths, + mask, + transaction=_helpers.get_transaction_id(transaction), + metadata=self._rpc_metadata, + ) + + for get_doc_response in response_iterator: + yield _parse_batch_get(get_doc_response, reference_map, self) + + def collections(self): + """List top-level collections of the client's database. + + Returns: + Sequence[~.firestore_v1.collection.CollectionReference]: + iterator of subcollections of the current document. + """ + iterator = self._firestore_api.list_collection_ids( + self._database_string, metadata=self._rpc_metadata + ) + iterator.client = self + iterator.item_to_value = _item_to_collection_ref + return iterator + + def batch(self): + """Get a batch instance from this client. 
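A sketch of the two precondition flavors and of batched reads (``client`` as above; ``snapshot`` stands for a previously fetched DocumentSnapshot, and the snapshot attributes used below are assumed to match the existing v1beta1 surface):

# Exactly one keyword is accepted; each maps onto a helper option class.
update_opt = client.write_option(exists=True)                              # ExistsOption
replay_opt = client.write_option(last_update_time=snapshot.update_time)    # LastUpdateOption

# get_all() streams BatchGetDocumentsResponse messages and yields snapshots,
# not necessarily in the order the references were given.
refs = [client.document("users", "alice"), client.document("users", "bob")]
for snap in client.get_all(refs, field_paths=["name"]):
    if snap.exists:
        print(snap.id, snap.to_dict())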
+ + Returns: + ~.firestore_v1.batch.WriteBatch: A "write" batch to be + used for accumulating document changes and sending the changes + all at once. + """ + return WriteBatch(self) + + def transaction(self, **kwargs): + """Get a transaction that uses this client. + + See :class:`~.firestore_v1.transaction.Transaction` for + more information on transactions and the constructor arguments. + + Args: + kwargs (Dict[str, Any]): The keyword arguments (other than + ``client``) to pass along to the + :class:`~.firestore_v1.transaction.Transaction` + constructor. + + Returns: + ~.firestore_v1.transaction.Transaction: A transaction + attached to this client. + """ + return Transaction(self, **kwargs) + + +def _reference_info(references): + """Get information about document references. + + Helper for :meth:`~.firestore_v1.client.Client.get_all`. + + Args: + references (List[.DocumentReference, ...]): Iterable of document + references. + + Returns: + Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of + + * fully-qualified documents paths for each reference in ``references`` + * a mapping from the paths to the original reference. (If multiple + ``references`` contains multiple references to the same document, + that key will be overwritten in the result.) + """ + document_paths = [] + reference_map = {} + for reference in references: + doc_path = reference._document_path + document_paths.append(doc_path) + reference_map[doc_path] = reference + + return document_paths, reference_map + + +def _get_reference(document_path, reference_map): + """Get a document reference from a dictionary. + + This just wraps a simple dictionary look-up with a helpful error that is + specific to :meth:`~.firestore.client.Client.get_all`, the + **public** caller of this function. + + Args: + document_path (str): A fully-qualified document path. + reference_map (Dict[str, .DocumentReference]): A mapping (produced + by :func:`_reference_info`) of fully-qualified document paths to + document references. + + Returns: + .DocumentReference: The matching reference. + + Raises: + ValueError: If ``document_path`` has not been encountered. + """ + try: + return reference_map[document_path] + except KeyError: + msg = _BAD_DOC_TEMPLATE.format(document_path) + raise ValueError(msg) + + +def _parse_batch_get(get_doc_response, reference_map, client): + """Parse a `BatchGetDocumentsResponse` protobuf. + + Args: + get_doc_response (~google.cloud.proto.firestore.v1.\ + firestore_pb2.BatchGetDocumentsResponse): A single response (from + a stream) containing the "get" response for a document. + reference_map (Dict[str, .DocumentReference]): A mapping (produced + by :func:`_reference_info`) of fully-qualified document paths to + document references. + client (~.firestore_v1.client.Client): A client that has + a document factory. + + Returns: + [.DocumentSnapshot]: The retrieved snapshot. + + Raises: + ValueError: If the response has a ``result`` field (a oneof) other + than ``found`` or ``missing``. 
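+
+    Example (an illustrative sketch of how this helper surfaces through the
+    public :meth:`~.firestore_v1.client.Client.get_all` caller; ``client``,
+    ``doc_ref1`` and ``doc_ref2`` are hypothetical):
+
+    .. code-block:: python
+
+        >>> for snapshot in client.get_all([doc_ref1, doc_ref2]):
+        ...     print(snapshot.exists, snapshot.to_dict())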
+ """ + result_type = get_doc_response.WhichOneof("result") + if result_type == "found": + reference = _get_reference(get_doc_response.found.name, reference_map) + data = _helpers.decode_dict(get_doc_response.found.fields, client) + snapshot = DocumentSnapshot( + reference, + data, + exists=True, + read_time=get_doc_response.read_time, + create_time=get_doc_response.found.create_time, + update_time=get_doc_response.found.update_time, + ) + elif result_type == "missing": + snapshot = DocumentSnapshot( + None, + None, + exists=False, + read_time=get_doc_response.read_time, + create_time=None, + update_time=None, + ) + else: + raise ValueError( + "`BatchGetDocumentsResponse.result` (a oneof) had a field other " + "than `found` or `missing` set, or was unset" + ) + return snapshot + + +def _get_doc_mask(field_paths): + """Get a document mask if field paths are provided. + + Args: + field_paths (Optional[Iterable[str, ...]]): An iterable of field + paths (``.``-delimited list of field names) to use as a + projection of document fields in the returned results. + + Returns: + Optional[google.cloud.firestore_v1.types.DocumentMask]: A mask + to project documents to a restricted set of field paths. + """ + if field_paths is None: + return None + else: + return types.DocumentMask(field_paths=field_paths) + + +def _item_to_collection_ref(iterator, item): + """Convert collection ID to collection ref. + + Args: + iterator (google.api_core.page_iterator.GRPCIterator): + iterator response + item (str): ID of the collection + """ + return iterator.client.collection(item) diff --git a/firestore/google/cloud/firestore_v1/collection.py b/firestore/google/cloud/firestore_v1/collection.py new file mode 100644 index 000000000000..e7b999448056 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/collection.py @@ -0,0 +1,477 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Classes for representing collections for the Google Cloud Firestore API.""" +import random +import warnings + +import six + +from google.cloud.firestore_v1 import _helpers +from google.cloud.firestore_v1 import query as query_mod +from google.cloud.firestore_v1.proto import document_pb2 +from google.cloud.firestore_v1.watch import Watch +from google.cloud.firestore_v1 import document + +_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + +class CollectionReference(object): + """A reference to a collection in a Firestore database. + + The collection may already exist or this class can facilitate creation + of documents within the collection. + + Args: + path (Tuple[str, ...]): The components in the collection path. + This is a series of strings representing each collection and + sub-collection ID, as well as the document IDs for any documents + that contain a sub-collection. + kwargs (dict): The keyword arguments for the constructor. The only + supported keyword is ``client`` and it must be a + :class:`~.firestore_v1.client.Client` if provided. 
It + represents the client that created this collection reference. + + Raises: + ValueError: if + + * the ``path`` is empty + * there are an even number of elements + * a collection ID in ``path`` is not a string + * a document ID in ``path`` is not a string + TypeError: If a keyword other than ``client`` is used. + """ + + def __init__(self, *path, **kwargs): + _helpers.verify_path(path, is_collection=True) + self._path = path + self._client = kwargs.pop("client", None) + if kwargs: + raise TypeError( + "Received unexpected arguments", kwargs, "Only `client` is supported" + ) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._path == other._path and self._client == other._client + + @property + def id(self): + """The collection identifier. + + Returns: + str: The last component of the path. + """ + return self._path[-1] + + @property + def parent(self): + """Document that owns the current collection. + + Returns: + Optional[~.firestore_v1.document.DocumentReference]: The + parent document, if the current collection is not a + top-level collection. + """ + if len(self._path) == 1: + return None + else: + parent_path = self._path[:-1] + return self._client.document(*parent_path) + + def document(self, document_id=None): + """Create a sub-document underneath the current collection. + + Args: + document_id (Optional[str]): The document identifier + within the current collection. If not provided, will default + to a random 20 character string composed of digits, + uppercase and lowercase and letters. + + Returns: + ~.firestore_v1.document.DocumentReference: The child + document. + """ + if document_id is None: + document_id = _auto_id() + + child_path = self._path + (document_id,) + return self._client.document(*child_path) + + def _parent_info(self): + """Get fully-qualified parent path and prefix for this collection. + + Returns: + Tuple[str, str]: Pair of + + * the fully-qualified (with database and project) path to the + parent of this collection (will either be the database path + or a document path). + * the prefix to a document in this collection. + """ + parent_doc = self.parent + if parent_doc is None: + parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join( + (self._client._database_string, "documents") + ) + else: + parent_path = parent_doc._document_path + + expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id)) + return parent_path, expected_prefix + + def add(self, document_data, document_id=None): + """Create a document in the Firestore database with the provided data. + + Args: + document_data (dict): Property names and values to use for + creating the document. + document_id (Optional[str]): The document identifier within the + current collection. If not provided, an ID will be + automatically assigned by the server (the assigned ID will be + a random 20 character string composed of digits, + uppercase and lowercase letters). + + Returns: + Tuple[google.protobuf.timestamp_pb2.Timestamp, \ + ~.firestore_v1.document.DocumentReference]: Pair of + + * The ``update_time`` when the document was created (or + overwritten). + * A document reference for the created document. + + Raises: + ~google.cloud.exceptions.Conflict: If ``document_id`` is provided + and the document already exists. 
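+
+        For example (a sketch; ``collection`` and the field values shown are
+        hypothetical):
+
+        .. code-block:: python
+
+            >>> # Let the server assign a random 20-character document ID.
+            >>> update_time, doc_ref = collection.add({'name': 'Ada Lovelace'})
+            >>> # Or specify the document ID explicitly.
+            >>> update_time, doc_ref = collection.add(
+            ...     {'name': 'Alan Turing'}, document_id='aturing'
+            ... )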
+ """ + if document_id is None: + parent_path, expected_prefix = self._parent_info() + + document_pb = document_pb2.Document() + + created_document_pb = self._client._firestore_api.create_document( + parent_path, + collection_id=self.id, + document_id=None, + document=document_pb, + mask=None, + metadata=self._client._rpc_metadata, + ) + + new_document_id = _helpers.get_doc_id(created_document_pb, expected_prefix) + document_ref = self.document(new_document_id) + set_result = document_ref.set(document_data) + return set_result.update_time, document_ref + else: + document_ref = self.document(document_id) + write_result = document_ref.create(document_data) + return write_result.update_time, document_ref + + def list_documents(self, page_size=None): + """List all subdocuments of the current collection. + + Args: + page_size (Optional[int]]): The maximum number of documents + in each page of results from this request. Non-positive values + are ignored. Defaults to a sensible value set by the API. + + Returns: + Sequence[~.firestore_v1.collection.DocumentReference]: + iterator of subdocuments of the current collection. If the + collection does not exist at the time of `snapshot`, the + iterator will be empty + """ + parent, _ = self._parent_info() + + iterator = self._client._firestore_api.list_documents( + parent, + self.id, + page_size=page_size, + show_missing=True, + metadata=self._client._rpc_metadata, + ) + iterator.collection = self + iterator.item_to_value = _item_to_document_ref + return iterator + + def select(self, field_paths): + """Create a "select" query with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.select` for + more information on this method. + + Args: + field_paths (Iterable[str, ...]): An iterable of field paths + (``.``-delimited list of field names) to use as a projection + of document fields in the query results. + + Returns: + ~.firestore_v1.query.Query: A "projected" query. + """ + query = query_mod.Query(self) + return query.select(field_paths) + + def where(self, field_path, op_string, value): + """Create a "where" query with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.where` for + more information on this method. + + Args: + field_path (str): A field path (``.``-delimited list of + field names) for the field to filter on. + op_string (str): A comparison operation in the form of a string. + Acceptable values are ``<``, ``<=``, ``==``, ``>=`` + and ``>``. + value (Any): The value to compare the field against in the filter. + If ``value`` is :data:`None` or a NaN, then ``==`` is the only + allowed operation. + + Returns: + ~.firestore_v1.query.Query: A filtered query. + """ + query = query_mod.Query(self) + return query.where(field_path, op_string, value) + + def order_by(self, field_path, **kwargs): + """Create an "order by" query with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.order_by` for + more information on this method. + + Args: + field_path (str): A field path (``.``-delimited list of + field names) on which to order the query results. + kwargs (Dict[str, Any]): The keyword arguments to pass along + to the query. The only supported keyword is ``direction``, + see :meth:`~.firestore_v1.query.Query.order_by` for + more information. + + Returns: + ~.firestore_v1.query.Query: An "order by" query. + """ + query = query_mod.Query(self) + return query.order_by(field_path, **kwargs) + + def limit(self, count): + """Create a limited query with this collection as parent. 
+ + See + :meth:`~.firestore_v1.query.Query.limit` for + more information on this method. + + Args: + count (int): Maximum number of documents to return that match + the query. + + Returns: + ~.firestore_v1.query.Query: A limited query. + """ + query = query_mod.Query(self) + return query.limit(count) + + def offset(self, num_to_skip): + """Skip to an offset in a query with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.offset` for + more information on this method. + + Args: + num_to_skip (int): The number of results to skip at the beginning + of query results. (Must be non-negative.) + + Returns: + ~.firestore_v1.query.Query: An offset query. + """ + query = query_mod.Query(self) + return query.offset(num_to_skip) + + def start_at(self, document_fields): + """Start query at a cursor with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.start_at` for + more information on this method. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. + """ + query = query_mod.Query(self) + return query.start_at(document_fields) + + def start_after(self, document_fields): + """Start query after a cursor with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.start_after` for + more information on this method. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. + """ + query = query_mod.Query(self) + return query.start_after(document_fields) + + def end_before(self, document_fields): + """End query before a cursor with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.end_before` for + more information on this method. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. + """ + query = query_mod.Query(self) + return query.end_before(document_fields) + + def end_at(self, document_fields): + """End query at a cursor with this collection as parent. + + See + :meth:`~.firestore_v1.query.Query.end_at` for + more information on this method. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. 
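+
+        For example (a sketch; the field name and cursor value are
+        hypothetical):
+
+        .. code-block:: python
+
+            >>> query = collection.order_by('population')
+            >>> query = query.end_at({'population': 1000000})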
+ """ + query = query_mod.Query(self) + return query.end_at(document_fields) + + def get(self, transaction=None): + """Deprecated alias for :meth:`stream`.""" + warnings.warn( + "'Collection.get' is deprecated: please use 'Collection.stream' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.stream(transaction=transaction) + + def stream(self, transaction=None): + """Read the documents in this collection. + + This sends a ``RunQuery`` RPC and then returns an iterator which + consumes each document returned in the stream of ``RunQueryResponse`` + messages. + + .. note:: + + The underlying stream of responses will time out after + the ``max_rpc_timeout_millis`` value set in the GAPIC + client configuration for the ``RunQuery`` API. Snapshots + not consumed from the iterator before that point will be lost. + + If a ``transaction`` is used and it already has write operations + added, this method cannot be used (i.e. read-after-write is not + allowed). + + Args: + transaction (Optional[~.firestore_v1.transaction.\ + Transaction]): An existing transaction that the query will + run in. + + Yields: + ~.firestore_v1.document.DocumentSnapshot: The next + document that fulfills the query. + """ + query = query_mod.Query(self) + return query.stream(transaction=transaction) + + def on_snapshot(self, callback): + """Monitor the documents in this collection. + + This starts a watch on this collection using a background thread. The + provided callback is run on the snapshot of the documents. + + Args: + callback(~.firestore.collection.CollectionSnapshot): a callback + to run when a change occurs. + + Example: + from google.cloud import firestore_v1 + + db = firestore_v1.Client() + collection_ref = db.collection(u'users') + + def on_snapshot(collection_snapshot): + for doc in collection_snapshot.documents: + print(u'{} => {}'.format(doc.id, doc.to_dict())) + + # Watch this collection + collection_watch = collection_ref.on_snapshot(on_snapshot) + + # Terminate this watch + collection_watch.unsubscribe() + """ + return Watch.for_query( + query_mod.Query(self), + callback, + document.DocumentSnapshot, + document.DocumentReference, + ) + + +def _auto_id(): + """Generate a "random" automatically generated ID. + + Returns: + str: A 20 character string composed of digits, uppercase and + lowercase and letters. + """ + return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20)) + + +def _item_to_document_ref(iterator, item): + """Convert Document resource to document ref. + + Args: + iterator (google.api_core.page_iterator.GRPCIterator): + iterator response + item (dict): document resource + """ + document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1] + return iterator.collection.document(document_id) diff --git a/firestore/google/cloud/firestore_v1/document.py b/firestore/google/cloud/firestore_v1/document.py new file mode 100644 index 000000000000..6843aefa1383 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/document.py @@ -0,0 +1,780 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Classes for representing documents for the Google Cloud Firestore API.""" + +import copy + +import six + +from google.api_core import exceptions +from google.cloud.firestore_v1 import _helpers +from google.cloud.firestore_v1 import field_path as field_path_module +from google.cloud.firestore_v1.proto import common_pb2 +from google.cloud.firestore_v1.watch import Watch + + +class DocumentReference(object): + """A reference to a document in a Firestore database. + + The document may already exist or can be created by this class. + + Args: + path (Tuple[str, ...]): The components in the document path. + This is a series of strings representing each collection and + sub-collection ID, as well as the document IDs for any documents + that contain a sub-collection (as well as the base document). + kwargs (dict): The keyword arguments for the constructor. The only + supported keyword is ``client`` and it must be a + :class:`~.firestore_v1.client.Client`. It represents + the client that created this document reference. + + Raises: + ValueError: if + + * the ``path`` is empty + * there are an even number of elements + * a collection ID in ``path`` is not a string + * a document ID in ``path`` is not a string + TypeError: If a keyword other than ``client`` is used. + """ + + _document_path_internal = None + + def __init__(self, *path, **kwargs): + _helpers.verify_path(path, is_collection=False) + self._path = path + self._client = kwargs.pop("client", None) + if kwargs: + raise TypeError( + "Received unexpected arguments", kwargs, "Only `client` is supported" + ) + + def __copy__(self): + """Shallow copy the instance. + + We leave the client "as-is" but tuple-unpack the path. + + Returns: + .DocumentReference: A copy of the current document. + """ + result = self.__class__(*self._path, client=self._client) + result._document_path_internal = self._document_path_internal + return result + + def __deepcopy__(self, unused_memo): + """Deep copy the instance. + + This isn't a true deep copy, wee leave the client "as-is" but + tuple-unpack the path. + + Returns: + .DocumentReference: A copy of the current document. + """ + return self.__copy__() + + def __eq__(self, other): + """Equality check against another instance. + + Args: + other (Any): A value to compare against. + + Returns: + Union[bool, NotImplementedType]: Indicating if the values are + equal. + """ + if isinstance(other, DocumentReference): + return self._client == other._client and self._path == other._path + else: + return NotImplemented + + def __hash__(self): + return hash(self._path) + hash(self._client) + + def __ne__(self, other): + """Inequality check against another instance. + + Args: + other (Any): A value to compare against. + + Returns: + Union[bool, NotImplementedType]: Indicating if the values are + not equal. + """ + if isinstance(other, DocumentReference): + return self._client != other._client or self._path != other._path + else: + return NotImplemented + + @property + def path(self): + """Database-relative for this document. + + Returns: + str: The document's relative path. + """ + return "/".join(self._path) + + @property + def _document_path(self): + """Create and cache the full path for this document. + + Of the form: + + ``projects/{project_id}/databases/{database_id}/... + documents/{document_path}`` + + Returns: + str: The full document path. 
+ + Raises: + ValueError: If the current document reference has no ``client``. + """ + if self._document_path_internal is None: + if self._client is None: + raise ValueError("A document reference requires a `client`.") + self._document_path_internal = _get_document_path(self._client, self._path) + + return self._document_path_internal + + @property + def id(self): + """The document identifier (within its collection). + + Returns: + str: The last component of the path. + """ + return self._path[-1] + + @property + def parent(self): + """Collection that owns the current document. + + Returns: + ~.firestore_v1.collection.CollectionReference: The + parent collection. + """ + parent_path = self._path[:-1] + return self._client.collection(*parent_path) + + def collection(self, collection_id): + """Create a sub-collection underneath the current document. + + Args: + collection_id (str): The sub-collection identifier (sometimes + referred to as the "kind"). + + Returns: + ~.firestore_v1.collection.CollectionReference: The + child collection. + """ + child_path = self._path + (collection_id,) + return self._client.collection(*child_path) + + def create(self, document_data): + """Create the current document in the Firestore database. + + Args: + document_data (dict): Property names and values to use for + creating a document. + + Returns: + google.cloud.firestore_v1.types.WriteResult: The + write result corresponding to the committed document. A write + result contains an ``update_time`` field. + + Raises: + ~google.cloud.exceptions.Conflict: If the document already exists. + """ + batch = self._client.batch() + batch.create(self, document_data) + write_results = batch.commit() + return _first_write_result(write_results) + + def set(self, document_data, merge=False): + """Replace the current document in the Firestore database. + + A write ``option`` can be specified to indicate preconditions of + the "set" operation. If no ``option`` is specified and this document + doesn't exist yet, this method will create it. + + Overwrites all content for the document with the fields in + ``document_data``. This method performs almost the same functionality + as :meth:`create`. The only difference is that this method doesn't + make any requirements on the existence of the document (unless + ``option`` is used), whereas as :meth:`create` will fail if the + document already exists. + + Args: + document_data (dict): Property names and values to use for + replacing a document. + merge (Optional[bool] or Optional[List]): + If True, apply merging instead of overwriting the state + of the document. + + Returns: + google.cloud.firestore_v1.types.WriteResult: The + write result corresponding to the committed document. A write + result contains an ``update_time`` field. + """ + batch = self._client.batch() + batch.set(self, document_data, merge=merge) + write_results = batch.commit() + return _first_write_result(write_results) + + def update(self, field_updates, option=None): + """Update an existing document in the Firestore database. + + By default, this method verifies that the document exists on the + server before making updates. A write ``option`` can be specified to + override these preconditions. + + Each key in ``field_updates`` can either be a field name or a + **field path** (For more information on **field paths**, see + :meth:`~.firestore_v1.client.Client.field_path`.) To + illustrate this, consider a document with + + .. 
code-block:: python + + >>> snapshot = document.get() + >>> snapshot.to_dict() + { + 'foo': { + 'bar': 'baz', + }, + 'other': True, + } + + stored on the server. If the field name is used in the update: + + .. code-block:: python + + >>> field_updates = { + ... 'foo': { + ... 'quux': 800, + ... }, + ... } + >>> document.update(field_updates) + + then all of ``foo`` will be overwritten on the server and the new + value will be + + .. code-block:: python + + >>> snapshot = document.get() + >>> snapshot.to_dict() + { + 'foo': { + 'quux': 800, + }, + 'other': True, + } + + On the other hand, if a ``.``-delimited **field path** is used in the + update: + + .. code-block:: python + + >>> field_updates = { + ... 'foo.quux': 800, + ... } + >>> document.update(field_updates) + + then only ``foo.quux`` will be updated on the server and the + field ``foo.bar`` will remain intact: + + .. code-block:: python + + >>> snapshot = document.get() + >>> snapshot.to_dict() + { + 'foo': { + 'bar': 'baz', + 'quux': 800, + }, + 'other': True, + } + + .. warning:: + + A **field path** can only be used as a top-level key in + ``field_updates``. + + To delete / remove a field from an existing document, use the + :attr:`~.firestore_v1.transforms.DELETE_FIELD` sentinel. So + with the example above, sending + + .. code-block:: python + + >>> field_updates = { + ... 'other': firestore.DELETE_FIELD, + ... } + >>> document.update(field_updates) + + would update the value on the server to: + + .. code-block:: python + + >>> snapshot = document.get() + >>> snapshot.to_dict() + { + 'foo': { + 'bar': 'baz', + }, + } + + To set a field to the current time on the server when the + update is received, use the + :attr:`~.firestore_v1.transforms.SERVER_TIMESTAMP` sentinel. + Sending + + .. code-block:: python + + >>> field_updates = { + ... 'foo.now': firestore.SERVER_TIMESTAMP, + ... } + >>> document.update(field_updates) + + would update the value on the server to: + + .. code-block:: python + + >>> snapshot = document.get() + >>> snapshot.to_dict() + { + 'foo': { + 'bar': 'baz', + 'now': datetime.datetime(2012, ...), + }, + 'other': True, + } + + Args: + field_updates (dict): Field names or paths to update and values + to update with. + option (Optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. + + Returns: + google.cloud.firestore_v1.types.WriteResult: The + write result corresponding to the updated document. A write + result contains an ``update_time`` field. + + Raises: + ~google.cloud.exceptions.NotFound: If the document does not exist. + """ + batch = self._client.batch() + batch.update(self, field_updates, option=option) + write_results = batch.commit() + return _first_write_result(write_results) + + def delete(self, option=None): + """Delete the current document in the Firestore database. + + Args: + option (Optional[~.firestore_v1.client.WriteOption]): A + write option to make assertions / preconditions on the server + state of the document before applying changes. + + Returns: + google.protobuf.timestamp_pb2.Timestamp: The time that the delete + request was received by the server. If the document did not exist + when the delete was sent (i.e. nothing was deleted), this method + will still succeed and will still return the time that the + request was received by the server. 
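+
+        For example, deleting only if the document is unchanged since it was
+        last read (a sketch; ``document``, ``client`` and ``snapshot`` name a
+        hypothetical reference, its client and a previously retrieved
+        :class:`DocumentSnapshot` of this document):
+
+        .. code-block:: python
+
+            >>> option = client.write_option(last_update_time=snapshot.update_time)
+            >>> delete_time = document.delete(option=option)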
+ """ + write_pb = _helpers.pb_for_delete(self._document_path, option) + commit_response = self._client._firestore_api.commit( + self._client._database_string, + [write_pb], + transaction=None, + metadata=self._client._rpc_metadata, + ) + + return commit_response.commit_time + + def get(self, field_paths=None, transaction=None): + """Retrieve a snapshot of the current document. + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + If a ``transaction`` is used and it already has write operations + added, this method cannot be used (i.e. read-after-write is not + allowed). + + Args: + field_paths (Optional[Iterable[str, ...]]): An iterable of field + paths (``.``-delimited list of field names) to use as a + projection of document fields in the returned results. If + no value is provided, all fields will be returned. + transaction (Optional[~.firestore_v1.transaction.\ + Transaction]): An existing transaction that this reference + will be retrieved in. + + Returns: + ~.firestore_v1.document.DocumentSnapshot: A snapshot of + the current document. If the document does not exist at + the time of `snapshot`, the snapshot `reference`, `data`, + `update_time`, and `create_time` attributes will all be + `None` and `exists` will be `False`. + """ + if isinstance(field_paths, six.string_types): + raise ValueError("'field_paths' must be a sequence of paths, not a string.") + + if field_paths is not None: + mask = common_pb2.DocumentMask(field_paths=sorted(field_paths)) + else: + mask = None + + firestore_api = self._client._firestore_api + try: + document_pb = firestore_api.get_document( + self._document_path, + mask=mask, + transaction=_helpers.get_transaction_id(transaction), + metadata=self._client._rpc_metadata, + ) + except exceptions.NotFound: + data = None + exists = False + create_time = None + update_time = None + else: + data = _helpers.decode_dict(document_pb.fields, self._client) + exists = True + create_time = document_pb.create_time + update_time = document_pb.update_time + + return DocumentSnapshot( + reference=self, + data=data, + exists=exists, + read_time=None, # No server read_time available + create_time=create_time, + update_time=update_time, + ) + + def collections(self, page_size=None): + """List subcollections of the current document. + + Args: + page_size (Optional[int]]): The maximum number of collections + in each page of results from this request. Non-positive values + are ignored. Defaults to a sensible value set by the API. + + Returns: + Sequence[~.firestore_v1.collection.CollectionReference]: + iterator of subcollections of the current document. If the + document does not exist at the time of `snapshot`, the + iterator will be empty + """ + iterator = self._client._firestore_api.list_collection_ids( + self._document_path, + page_size=page_size, + metadata=self._client._rpc_metadata, + ) + iterator.document = self + iterator.item_to_value = _item_to_collection_ref + return iterator + + def on_snapshot(self, callback): + """Watch this document. + + This starts a watch on this document using a background thread. The + provided callback is run on the snapshot. 
+ + Args: + callback(~.firestore.document.DocumentSnapshot):a callback to run + when a change occurs + + Example: + from google.cloud import firestore_v1 + + db = firestore_v1.Client() + collection_ref = db.collection(u'users') + + def on_snapshot(document_snapshot): + doc = document_snapshot + print(u'{} => {}'.format(doc.id, doc.to_dict())) + + doc_ref = db.collection(u'users').document( + u'alovelace' + unique_resource_id()) + + # Watch this document + doc_watch = doc_ref.on_snapshot(on_snapshot) + + # Terminate this watch + doc_watch.unsubscribe() + """ + return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference) + + +class DocumentSnapshot(object): + """A snapshot of document data in a Firestore database. + + This represents data retrieved at a specific time and may not contain + all fields stored for the document (i.e. a hand-picked selection of + fields may have been retrieved). + + Instances of this class are not intended to be constructed by hand, + rather they'll be returned as responses to various methods, such as + :meth:`~.DocumentReference.get`. + + Args: + reference (~.firestore_v1.document.DocumentReference): A + document reference corresponding to the document that contains + the data in this snapshot. + data (Dict[str, Any]): The data retrieved in the snapshot. + exists (bool): Indicates if the document existed at the time the + snapshot was retrieved. + read_time (google.protobuf.timestamp_pb2.Timestamp): The time that + this snapshot was read from the server. + create_time (google.protobuf.timestamp_pb2.Timestamp): The time that + this document was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): The time that + this document was last updated. + """ + + def __init__(self, reference, data, exists, read_time, create_time, update_time): + self._reference = reference + # We want immutable data, so callers can't modify this value + # out from under us. + self._data = copy.deepcopy(data) + self._exists = exists + self.read_time = read_time + """google.protobuf.timestamp_pb2.Timestamp: Time snapshot was read.""" + self.create_time = create_time + """google.protobuf.timestamp_pb2.Timestamp: Document's creation.""" + self.update_time = update_time + """google.protobuf.timestamp_pb2.Timestamp: Document's last update.""" + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._reference == other._reference and self._data == other._data + + def __hash__(self): + seconds = self.update_time.seconds + nanos = self.update_time.nanos + return hash(self._reference) + hash(seconds) + hash(nanos) + + @property + def _client(self): + """The client that owns the document reference for this snapshot. + + Returns: + ~.firestore_v1.client.Client: The client that owns this + document. + """ + return self._reference._client + + @property + def exists(self): + """Existence flag. + + Indicates if the document existed at the time this snapshot + was retrieved. + + Returns: + bool: The existence flag. + """ + return self._exists + + @property + def id(self): + """The document identifier (within its collection). + + Returns: + str: The last component of the path of the document. + """ + return self._reference.id + + @property + def reference(self): + """Document reference corresponding to document that owns this data. + + Returns: + ~.firestore_v1.document.DocumentReference: A document + reference corresponding to this document. 
+ """ + return self._reference + + def get(self, field_path): + """Get a value from the snapshot data. + + If the data is nested, for example: + + .. code-block:: python + + >>> snapshot.to_dict() + { + 'top1': { + 'middle2': { + 'bottom3': 20, + 'bottom4': 22, + }, + 'middle5': True, + }, + 'top6': b'\x00\x01 foo', + } + + a **field path** can be used to access the nested data. For + example: + + .. code-block:: python + + >>> snapshot.get('top1') + { + 'middle2': { + 'bottom3': 20, + 'bottom4': 22, + }, + 'middle5': True, + } + >>> snapshot.get('top1.middle2') + { + 'bottom3': 20, + 'bottom4': 22, + } + >>> snapshot.get('top1.middle2.bottom3') + 20 + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + A copy is returned since the data may contain mutable values, + but the data stored in the snapshot must remain immutable. + + Args: + field_path (str): A field path (``.``-delimited list of + field names). + + Returns: + Any or None: + (A copy of) the value stored for the ``field_path`` or + None if snapshot document does not exist. + + Raises: + KeyError: If the ``field_path`` does not match nested data + in the snapshot. + """ + if not self._exists: + return None + nested_data = field_path_module.get_nested_value(field_path, self._data) + return copy.deepcopy(nested_data) + + def to_dict(self): + """Retrieve the data contained in this snapshot. + + A copy is returned since the data may contain mutable values, + but the data stored in the snapshot must remain immutable. + + Returns: + Dict[str, Any] or None: + The data in the snapshot. Returns None if reference + does not exist. + """ + if not self._exists: + return None + return copy.deepcopy(self._data) + + +def _get_document_path(client, path): + """Convert a path tuple into a full path string. + + Of the form: + + ``projects/{project_id}/databases/{database_id}/... + documents/{document_path}`` + + Args: + client (~.firestore_v1.client.Client): The client that holds + configuration details and a GAPIC client object. + path (Tuple[str, ...]): The components in a document path. + + Returns: + str: The fully-qualified document path. + """ + parts = (client._database_string, "documents") + path + return _helpers.DOCUMENT_PATH_DELIMITER.join(parts) + + +def _consume_single_get(response_iterator): + """Consume a gRPC stream that should contain a single response. + + The stream will correspond to a ``BatchGetDocuments`` request made + for a single document. + + Args: + response_iterator (~google.cloud.exceptions.GrpcRendezvous): A + streaming iterator returned from a ``BatchGetDocuments`` + request. + + Returns: + ~google.cloud.proto.firestore.v1.\ + firestore_pb2.BatchGetDocumentsResponse: The single "get" + response in the batch. + + Raises: + ValueError: If anything other than exactly one response is returned. + """ + # Calling ``list()`` consumes the entire iterator. + all_responses = list(response_iterator) + if len(all_responses) != 1: + raise ValueError( + "Unexpected response from `BatchGetDocumentsResponse`", + all_responses, + "Expected only one result", + ) + + return all_responses[0] + + +def _first_write_result(write_results): + """Get first write result from list. + + For cases where ``len(write_results) > 1``, this assumes the writes + occurred at the same time (e.g. if an update and transform are sent + at the same time). + + Args: + write_results (List[google.cloud.proto.firestore.v1.\ + write_pb2.WriteResult, ...]: The write results from a + ``CommitResponse``. 
+ + Returns: + google.cloud.firestore_v1.types.WriteResult: The + lone write result from ``write_results``. + + Raises: + ValueError: If there are zero write results. This is likely to + **never** occur, since the backend should be stable. + """ + if not write_results: + raise ValueError("Expected at least one write result") + + return write_results[0] + + +def _item_to_collection_ref(iterator, item): + """Convert collection ID to collection ref. + + Args: + iterator (google.api_core.page_iterator.GRPCIterator): + iterator response + item (str): ID of the collection + """ + return iterator.document.collection(item) diff --git a/firestore/google/cloud/firestore_v1/field_path.py b/firestore/google/cloud/firestore_v1/field_path.py new file mode 100644 index 000000000000..bba237ee2449 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/field_path.py @@ -0,0 +1,386 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for managing / converting field paths to / from strings.""" + +try: + from collections import abc as collections_abc +except ImportError: # Python 2.7 + import collections as collections_abc + +import re + +import six + + +_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data" +_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}" +_FIELD_PATH_WRONG_TYPE = ( + "The data at {!r} is not a dictionary, so it cannot contain the key {!r}" +) + +_FIELD_PATH_DELIMITER = "." +_BACKSLASH = "\\" +_ESCAPED_BACKSLASH = _BACKSLASH * 2 +_BACKTICK = "`" +_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK + +_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$") +_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]") +PATH_ELEMENT_TOKENS = [ + ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements + ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted + ("DOT", r"\."), # separator +] +TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS) +TOKENS_REGEX = re.compile(TOKENS_PATTERN) + + +def _tokenize_field_path(path): + """Lex a field path into tokens (including dots). + + Args: + path (str): field path to be lexed. + Returns: + List(str): tokens + """ + pos = 0 + get_token = TOKENS_REGEX.match + match = get_token(path) + while match is not None: + type_ = match.lastgroup + value = match.group(type_) + yield value + pos = match.end() + match = get_token(path, pos) + if pos != len(path): + raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:])) + + +def split_field_path(path): + """Split a field path into valid elements (without dots). + + Args: + path (str): field path to be lexed. + Returns: + List(str): tokens + Raises: + ValueError: if the path does not match the elements-interspersed- + with-dots pattern. 
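+
+    For example (an illustrative sketch):
+
+    .. code-block:: python
+
+        >>> split_field_path('a.b.c')
+        ['a', 'b', 'c']
+        >>> split_field_path('a.`b.c`')
+        ['a', '`b.c`']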
+ """ + if not path: + return [] + + elements = [] + want_dot = False + + for element in _tokenize_field_path(path): + if want_dot: + if element != ".": + raise ValueError("Invalid path: {}".format(path)) + else: + want_dot = False + else: + if element == ".": + raise ValueError("Invalid path: {}".format(path)) + elements.append(element) + want_dot = True + + if not want_dot or not elements: + raise ValueError("Invalid path: {}".format(path)) + + return elements + + +def parse_field_path(api_repr): + """Parse a **field path** from into a list of nested field names. + + See :func:`field_path` for more on **field paths**. + + Args: + api_repr (str): + The unique Firestore api representation which consists of + either simple or UTF-8 field names. It cannot exceed + 1500 bytes, and cannot be empty. Simple field names match + ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are + escaped by surrounding them with backticks. + + Returns: + List[str, ...]: The list of field names in the field path. + """ + # code dredged back up from + # https://github.com/googleapis/google-cloud-python/pull/5109/files + field_names = [] + for field_name in split_field_path(api_repr): + # non-simple field name + if field_name[0] == "`" and field_name[-1] == "`": + field_name = field_name[1:-1] + field_name = field_name.replace(_ESCAPED_BACKTICK, _BACKTICK) + field_name = field_name.replace(_ESCAPED_BACKSLASH, _BACKSLASH) + field_names.append(field_name) + return field_names + + +def render_field_path(field_names): + """Create a **field path** from a list of nested field names. + + A **field path** is a ``.``-delimited concatenation of the field + names. It is used to represent a nested field. For example, + in the data + + .. code-block: python + + data = { + 'aa': { + 'bb': { + 'cc': 10, + }, + }, + } + + the field path ``'aa.bb.cc'`` represents that data stored in + ``data['aa']['bb']['cc']``. + + Args: + field_names (Iterable[str, ...]): The list of field names. + + Returns: + str: The ``.``-delimited field path. + """ + result = [] + + for field_name in field_names: + match = _SIMPLE_FIELD_NAME.match(field_name) + if match and match.group(0) == field_name: + result.append(field_name) + else: + replaced = field_name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace( + _BACKTICK, _ESCAPED_BACKTICK + ) + result.append(_BACKTICK + replaced + _BACKTICK) + + return _FIELD_PATH_DELIMITER.join(result) + + +get_field_path = render_field_path # backward-compatibility + + +def get_nested_value(field_path, data): + """Get a (potentially nested) value from a dictionary. + + If the data is nested, for example: + + .. code-block:: python + + >>> data + { + 'top1': { + 'middle2': { + 'bottom3': 20, + 'bottom4': 22, + }, + 'middle5': True, + }, + 'top6': b'\x00\x01 foo', + } + + a **field path** can be used to access the nested data. For + example: + + .. code-block:: python + + >>> get_nested_value('top1', data) + { + 'middle2': { + 'bottom3': 20, + 'bottom4': 22, + }, + 'middle5': True, + } + >>> get_nested_value('top1.middle2', data) + { + 'bottom3': 20, + 'bottom4': 22, + } + >>> get_nested_value('top1.middle2.bottom3', data) + 20 + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + Args: + field_path (str): A field path (``.``-delimited list of + field names). + data (Dict[str, Any]): The (possibly nested) data. + + Returns: + Any: (A copy of) the value stored for the ``field_path``. + + Raises: + KeyError: If the ``field_path`` does not match nested data. 
+ """ + field_names = parse_field_path(field_path) + + nested_data = data + for index, field_name in enumerate(field_names): + if isinstance(nested_data, collections_abc.Mapping): + if field_name in nested_data: + nested_data = nested_data[field_name] + else: + if index == 0: + msg = _FIELD_PATH_MISSING_TOP.format(field_name) + raise KeyError(msg) + else: + partial = render_field_path(field_names[:index]) + msg = _FIELD_PATH_MISSING_KEY.format(field_name, partial) + raise KeyError(msg) + else: + partial = render_field_path(field_names[:index]) + msg = _FIELD_PATH_WRONG_TYPE.format(partial, field_name) + raise KeyError(msg) + + return nested_data + + +class FieldPath(object): + """Field Path object for client use. + + A field path is a sequence of element keys, separated by periods. + Each element key can be either a simple identifier, or a full unicode + string. + + In the string representation of a field path, non-identifier elements + must be quoted using backticks, with internal backticks and backslashes + escaped with a backslash. + + Args: + parts: (one or more strings) + Indicating path of the key to be used. + """ + + def __init__(self, *parts): + for part in parts: + if not isinstance(part, six.string_types) or not part: + error = "One or more components is not a string or is empty." + raise ValueError(error) + self.parts = tuple(parts) + + @classmethod + def from_api_repr(cls, api_repr): + """Factory: create a FieldPath from the string formatted per the API. + + Args: + api_repr (str): a string path, with non-identifier elements quoted + It cannot exceed 1500 characters, and cannot be empty. + Returns: + (:class:`FieldPath`) An instance parsed from ``api_repr``. + Raises: + ValueError if the parsing fails + """ + api_repr = api_repr.strip() + if not api_repr: + raise ValueError("Field path API representation cannot be empty.") + return cls(*parse_field_path(api_repr)) + + @classmethod + def from_string(cls, path_string): + """Factory: create a FieldPath from a unicode string representation. + + This method splits on the character `.` and disallows the + characters `~*/[]`. To create a FieldPath whose components have + those characters, call the constructor. + + Args: + path_string (str): A unicode string which cannot contain + `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty. + + Returns: + (:class:`FieldPath`) An instance parsed from ``path_string``. + """ + try: + return cls.from_api_repr(path_string) + except ValueError: + elements = path_string.split(".") + for element in elements: + if not element: + raise ValueError("Empty element") + if _LEADING_ALPHA_INVALID.match(element): + raise ValueError( + "Non-alphanum char in element with leading alpha: {}".format( + element + ) + ) + return FieldPath(*elements) + + def __repr__(self): + paths = "" + for part in self.parts: + paths += "'" + part + "'," + paths = paths[:-1] + return "FieldPath({})".format(paths) + + def __hash__(self): + return hash(self.to_api_repr()) + + def __eq__(self, other): + if isinstance(other, FieldPath): + return self.parts == other.parts + return NotImplemented + + def __lt__(self, other): + if isinstance(other, FieldPath): + return self.parts < other.parts + return NotImplemented + + def __add__(self, other): + """Adds `other` field path to end of this field path. + + Args: + other (~google.cloud.firestore_v1._helpers.FieldPath, str): + The field path to add to the end of this `FieldPath`. 
+ """ + if isinstance(other, FieldPath): + parts = self.parts + other.parts + return FieldPath(*parts) + elif isinstance(other, six.string_types): + parts = self.parts + FieldPath.from_string(other).parts + return FieldPath(*parts) + else: + return NotImplemented + + def to_api_repr(self): + """Render a quoted string representation of the FieldPath + + Returns: + (str) Quoted string representation of the path stored + within this FieldPath. + """ + return render_field_path(self.parts) + + def eq_or_parent(self, other): + """Check whether ``other`` is an ancestor. + + Returns: + (bool) True IFF ``other`` is an ancestor or equal to ``self``, + else False. + """ + return self.parts[: len(other.parts)] == other.parts[: len(self.parts)] + + def lineage(self): + """Return field paths for all parents. + + Returns: Set[:class:`FieldPath`] + """ + indexes = six.moves.range(1, len(self.parts)) + return {FieldPath(*self.parts[:index]) for index in indexes} diff --git a/firestore/google/cloud/firestore_v1/order.py b/firestore/google/cloud/firestore_v1/order.py new file mode 100644 index 000000000000..d70293a36a5d --- /dev/null +++ b/firestore/google/cloud/firestore_v1/order.py @@ -0,0 +1,207 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum +from google.cloud.firestore_v1._helpers import decode_value +import math + + +class TypeOrder(Enum): + # NOTE: This order is defined by the backend and cannot be changed. + NULL = 0 + BOOLEAN = 1 + NUMBER = 2 + TIMESTAMP = 3 + STRING = 4 + BLOB = 5 + REF = 6 + GEO_POINT = 7 + ARRAY = 8 + OBJECT = 9 + + @staticmethod + def from_value(value): + v = value.WhichOneof("value_type") + + lut = { + "null_value": TypeOrder.NULL, + "boolean_value": TypeOrder.BOOLEAN, + "integer_value": TypeOrder.NUMBER, + "double_value": TypeOrder.NUMBER, + "timestamp_value": TypeOrder.TIMESTAMP, + "string_value": TypeOrder.STRING, + "bytes_value": TypeOrder.BLOB, + "reference_value": TypeOrder.REF, + "geo_point_value": TypeOrder.GEO_POINT, + "array_value": TypeOrder.ARRAY, + "map_value": TypeOrder.OBJECT, + } + + if v not in lut: + raise ValueError("Could not detect value type for " + v) + return lut[v] + + +class Order(object): + """ + Order implements the ordering semantics of the backend. + """ + + @classmethod + def compare(cls, left, right): + """ + Main comparison function for all Firestore types. + @return -1 is left < right, 0 if left == right, otherwise 1 + """ + # First compare the types. 
+ leftType = TypeOrder.from_value(left).value + rightType = TypeOrder.from_value(right).value + + if leftType != rightType: + if leftType < rightType: + return -1 + return 1 + + value_type = left.WhichOneof("value_type") + + if value_type == "null_value": + return 0 # nulls are all equal + elif value_type == "boolean_value": + return cls._compare_to(left.boolean_value, right.boolean_value) + elif value_type == "integer_value": + return cls.compare_numbers(left, right) + elif value_type == "double_value": + return cls.compare_numbers(left, right) + elif value_type == "timestamp_value": + return cls.compare_timestamps(left, right) + elif value_type == "string_value": + return cls._compare_to(left.string_value, right.string_value) + elif value_type == "bytes_value": + return cls.compare_blobs(left, right) + elif value_type == "reference_value": + return cls.compare_resource_paths(left, right) + elif value_type == "geo_point_value": + return cls.compare_geo_points(left, right) + elif value_type == "array_value": + return cls.compare_arrays(left, right) + elif value_type == "map_value": + return cls.compare_objects(left, right) + else: + raise ValueError("Unknown ``value_type``", str(value_type)) + + @staticmethod + def compare_blobs(left, right): + left_bytes = left.bytes_value + right_bytes = right.bytes_value + + return Order._compare_to(left_bytes, right_bytes) + + @staticmethod + def compare_timestamps(left, right): + left = left.timestamp_value + right = right.timestamp_value + + seconds = Order._compare_to(left.seconds or 0, right.seconds or 0) + if seconds != 0: + return seconds + + return Order._compare_to(left.nanos or 0, right.nanos or 0) + + @staticmethod + def compare_geo_points(left, right): + left_value = decode_value(left, None) + right_value = decode_value(right, None) + cmp = (left_value.latitude > right_value.latitude) - ( + left_value.latitude < right_value.latitude + ) + + if cmp != 0: + return cmp + return (left_value.longitude > right_value.longitude) - ( + left_value.longitude < right_value.longitude + ) + + @staticmethod + def compare_resource_paths(left, right): + left = left.reference_value + right = right.reference_value + + left_segments = left.split("/") + right_segments = right.split("/") + shorter = min(len(left_segments), len(right_segments)) + # compare segments + for i in range(shorter): + if left_segments[i] < right_segments[i]: + return -1 + if left_segments[i] > right_segments[i]: + return 1 + + left_length = len(left) + right_length = len(right) + return (left_length > right_length) - (left_length < right_length) + + @staticmethod + def compare_arrays(left, right): + l_values = left.array_value.values + r_values = right.array_value.values + + length = min(len(l_values), len(r_values)) + for i in range(length): + cmp = Order.compare(l_values[i], r_values[i]) + if cmp != 0: + return cmp + + return Order._compare_to(len(l_values), len(r_values)) + + @staticmethod + def compare_objects(left, right): + left_fields = left.map_value.fields + right_fields = right.map_value.fields + + for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)): + keyCompare = Order._compare_to(left_key, right_key) + if keyCompare != 0: + return keyCompare + + value_compare = Order.compare( + left_fields[left_key], right_fields[right_key] + ) + if value_compare != 0: + return value_compare + + return Order._compare_to(len(left_fields), len(right_fields)) + + @staticmethod + def compare_numbers(left, right): + left_value = decode_value(left, None) + right_value = 
decode_value(right, None) + return Order.compare_doubles(left_value, right_value) + + @staticmethod + def compare_doubles(left, right): + if math.isnan(left): + if math.isnan(right): + return 0 + return -1 + if math.isnan(right): + return 1 + + return Order._compare_to(left, right) + + @staticmethod + def _compare_to(left, right): + # We can't just use cmp(left, right) because cmp doesn't exist + # in Python 3, so this is an equivalent suggested by + # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons + return (left > right) - (left < right) diff --git a/firestore/google/cloud/firestore_v1/proto/test_v1_pb2.py b/firestore/google/cloud/firestore_v1/proto/test_v1_pb2.py new file mode 100644 index 000000000000..336bab948414 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/proto/test_v1_pb2.py @@ -0,0 +1,2190 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: test_v1.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.firestore_v1.proto import ( + common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2, +) +from google.cloud.firestore_v1.proto import ( + document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2, +) +from google.cloud.firestore_v1.proto import ( + firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2, +) +from google.cloud.firestore_v1.proto import ( + query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="test_v1.proto", + package="tests.v1", + syntax="proto3", + serialized_pb=_b( + '\n\rtest_v1.proto\x12\x08tests.v1\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"*\n\tTestSuite\x12\x1d\n\x05tests\x18\x01 \x03(\x0b\x32\x0e.tests.v1.Test"\xe0\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12 \n\x03get\x18\x02 \x01(\x0b\x32\x11.tests.v1.GetTestH\x00\x12&\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x14.tests.v1.CreateTestH\x00\x12 \n\x03set\x18\x04 \x01(\x0b\x32\x11.tests.v1.SetTestH\x00\x12&\n\x06update\x18\x05 \x01(\x0b\x32\x14.tests.v1.UpdateTestH\x00\x12\x31\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x19.tests.v1.UpdatePathsTestH\x00\x12&\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x14.tests.v1.DeleteTestH\x00\x12$\n\x05query\x18\x08 \x01(\x0b\x32\x13.tests.v1.QueryTestH\x00\x12&\n\x06listen\x18\t \x01(\x0b\x32\x14.tests.v1.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\x9e\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12#\n\x06option\x18\x02 
\x01(\x0b\x32\x13.tests.v1.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xe6\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12(\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x13.tests.v1.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"=\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12#\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x13.tests.v1.FieldPath"\x88\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12!\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\x10.tests.v1.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xbd\x02\n\x06\x43lause\x12"\n\x06select\x18\x01 \x01(\x0b\x32\x10.tests.v1.SelectH\x00\x12 \n\x05where\x18\x02 \x01(\x0b\x32\x0f.tests.v1.WhereH\x00\x12%\n\x08order_by\x18\x03 \x01(\x0b\x32\x11.tests.v1.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12$\n\x08start_at\x18\x06 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12\'\n\x0bstart_after\x18\x07 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12"\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x12&\n\nend_before\x18\t \x01(\x0b\x32\x10.tests.v1.CursorH\x00\x42\x08\n\x06\x63lause"-\n\x06Select\x12#\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x13.tests.v1.FieldPath"J\n\x05Where\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"?\n\x07OrderBy\x12!\n\x04path\x18\x01 \x01(\x0b\x32\x13.tests.v1.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"J\n\x06\x43ursor\x12+\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x15.tests.v1.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"}\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12%\n\tsnapshots\x18\x02 \x03(\x0b\x32\x12.tests.v1.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\x8c\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12$\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x13.tests.v1.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xc9\x01\n\tDocChange\x12&\n\x04kind\x18\x01 \x01(\x0e\x32\x18.tests.v1.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 
\x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3' + ), + dependencies=[ + google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR, + google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + + +_DOCCHANGE_KIND = _descriptor.EnumDescriptor( + name="Kind", + full_name="tests.v1.DocChange.Kind", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ADDED", index=1, number=1, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="REMOVED", index=2, number=2, options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="MODIFIED", index=3, number=3, options=None, type=None + ), + ], + containing_type=None, + options=None, + serialized_start=2875, + serialized_end=2941, +) +_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND) + + +_TESTSUITE = _descriptor.Descriptor( + name="TestSuite", + full_name="tests.v1.TestSuite", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="tests", + full_name="tests.v1.TestSuite.tests", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=248, + serialized_end=290, +) + + +_TEST = _descriptor.Descriptor( + name="Test", + full_name="tests.v1.Test", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="description", + full_name="tests.v1.Test.description", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="get", + full_name="tests.v1.Test.get", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create", + full_name="tests.v1.Test.create", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="set", + full_name="tests.v1.Test.set", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update", + full_name="tests.v1.Test.update", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_paths", + full_name="tests.v1.Test.update_paths", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="delete", + full_name="tests.v1.Test.delete", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="query", + full_name="tests.v1.Test.query", + index=7, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="listen", + full_name="tests.v1.Test.listen", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="test", + full_name="tests.v1.Test.test", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=293, + serialized_end=645, +) + + +_GETTEST = _descriptor.Descriptor( + name="GetTest", + full_name="tests.v1.GetTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.GetTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.GetTest.request", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=647, + serialized_end=736, +) + + +_CREATETEST = _descriptor.Descriptor( + name="CreateTest", + full_name="tests.v1.CreateTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.CreateTest.doc_ref_path", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.v1.CreateTest.json_data", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.CreateTest.request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.CreateTest.is_error", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=738, + serialized_end=862, +) + + +_SETTEST = _descriptor.Descriptor( + name="SetTest", + full_name="tests.v1.SetTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.SetTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="option", + full_name="tests.v1.SetTest.option", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.v1.SetTest.json_data", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.SetTest.request", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.SetTest.is_error", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=865, + serialized_end=1023, +) + + +_UPDATETEST = _descriptor.Descriptor( + name="UpdateTest", + full_name="tests.v1.UpdateTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.UpdateTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.v1.UpdateTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.v1.UpdateTest.json_data", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.UpdateTest.request", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.UpdateTest.is_error", + index=4, + number=5, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1026, + serialized_end=1207, +) + + +_UPDATEPATHSTEST = _descriptor.Descriptor( + name="UpdatePathsTest", + full_name="tests.v1.UpdatePathsTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.UpdatePathsTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.v1.UpdatePathsTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field_paths", + full_name="tests.v1.UpdatePathsTest.field_paths", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + 
file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_values", + full_name="tests.v1.UpdatePathsTest.json_values", + index=3, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.UpdatePathsTest.request", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.UpdatePathsTest.is_error", + index=5, + number=6, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1210, + serialized_end=1440, +) + + +_DELETETEST = _descriptor.Descriptor( + name="DeleteTest", + full_name="tests.v1.DeleteTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_ref_path", + full_name="tests.v1.DeleteTest.doc_ref_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="precondition", + full_name="tests.v1.DeleteTest.precondition", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="request", + full_name="tests.v1.DeleteTest.request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.DeleteTest.is_error", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1443, + serialized_end=1605, +) + + +_SETOPTION = _descriptor.Descriptor( + name="SetOption", + full_name="tests.v1.SetOption", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="all", + full_name="tests.v1.SetOption.all", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="fields", + full_name="tests.v1.SetOption.fields", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1607, + serialized_end=1668, +) + + +_QUERYTEST = _descriptor.Descriptor( + name="QueryTest", + full_name="tests.v1.QueryTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="coll_path", + full_name="tests.v1.QueryTest.coll_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="clauses", + full_name="tests.v1.QueryTest.clauses", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="query", + full_name="tests.v1.QueryTest.query", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.QueryTest.is_error", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1671, + serialized_end=1807, +) + + +_CLAUSE = _descriptor.Descriptor( + name="Clause", + full_name="tests.v1.Clause", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="select", + full_name="tests.v1.Clause.select", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="where", + full_name="tests.v1.Clause.where", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="order_by", + full_name="tests.v1.Clause.order_by", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="offset", + full_name="tests.v1.Clause.offset", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="limit", + full_name="tests.v1.Clause.limit", + index=4, + number=5, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_at", + full_name="tests.v1.Clause.start_at", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_after", + full_name="tests.v1.Clause.start_after", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_at", + full_name="tests.v1.Clause.end_at", + index=7, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_before", + full_name="tests.v1.Clause.end_before", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="clause", + full_name="tests.v1.Clause.clause", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1810, + serialized_end=2127, +) + + +_SELECT = _descriptor.Descriptor( + name="Select", + full_name="tests.v1.Select", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="tests.v1.Select.fields", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2129, + serialized_end=2174, +) + + +_WHERE = _descriptor.Descriptor( + name="Where", + full_name="tests.v1.Where", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.v1.Where.path", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="op", + full_name="tests.v1.Where.op", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_value", + full_name="tests.v1.Where.json_value", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2176, + serialized_end=2250, +) + + +_ORDERBY = _descriptor.Descriptor( + name="OrderBy", + full_name="tests.v1.OrderBy", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.v1.OrderBy.path", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="direction", + full_name="tests.v1.OrderBy.direction", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2252, + serialized_end=2315, +) + + +_CURSOR = _descriptor.Descriptor( + name="Cursor", + full_name="tests.v1.Cursor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="doc_snapshot", + full_name="tests.v1.Cursor.doc_snapshot", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_values", + full_name="tests.v1.Cursor.json_values", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2317, + serialized_end=2391, +) + + +_DOCSNAPSHOT = _descriptor.Descriptor( + name="DocSnapshot", + full_name="tests.v1.DocSnapshot", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="path", + full_name="tests.v1.DocSnapshot.path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="json_data", + full_name="tests.v1.DocSnapshot.json_data", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2393, + serialized_end=2439, +) + + +_FIELDPATH = _descriptor.Descriptor( + name="FieldPath", + full_name="tests.v1.FieldPath", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field", + full_name="tests.v1.FieldPath.field", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2441, + serialized_end=2467, +) + + +_LISTENTEST = _descriptor.Descriptor( + name="ListenTest", + full_name="tests.v1.ListenTest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="responses", + full_name="tests.v1.ListenTest.responses", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="snapshots", + full_name="tests.v1.ListenTest.snapshots", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="is_error", + full_name="tests.v1.ListenTest.is_error", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2469, + serialized_end=2594, +) + + +_SNAPSHOT = _descriptor.Descriptor( + name="Snapshot", + full_name="tests.v1.Snapshot", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="docs", + full_name="tests.v1.Snapshot.docs", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="changes", + full_name="tests.v1.Snapshot.changes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + 
default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_time", + full_name="tests.v1.Snapshot.read_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2597, + serialized_end=2737, +) + + +_DOCCHANGE = _descriptor.Descriptor( + name="DocChange", + full_name="tests.v1.DocChange", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kind", + full_name="tests.v1.DocChange.kind", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="doc", + full_name="tests.v1.DocChange.doc", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="old_index", + full_name="tests.v1.DocChange.old_index", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="new_index", + full_name="tests.v1.DocChange.new_index", + index=3, + number=4, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DOCCHANGE_KIND], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2740, + serialized_end=2941, +) + +_TESTSUITE.fields_by_name["tests"].message_type = _TEST +_TEST.fields_by_name["get"].message_type = _GETTEST +_TEST.fields_by_name["create"].message_type = _CREATETEST +_TEST.fields_by_name["set"].message_type = _SETTEST +_TEST.fields_by_name["update"].message_type = _UPDATETEST +_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST +_TEST.fields_by_name["delete"].message_type = _DELETETEST +_TEST.fields_by_name["query"].message_type = _QUERYTEST +_TEST.fields_by_name["listen"].message_type = _LISTENTEST +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"]) +_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"]) +_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"]) +_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"]) 
+_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"]) +_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"]) +_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"]) +_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"] +_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"]) +_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"] +_GETTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST +) +_CREATETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_SETTEST.fields_by_name["option"].message_type = _SETOPTION +_SETTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_UPDATETEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION +) +_UPDATETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_UPDATEPATHSTEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION +) +_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH +_UPDATEPATHSTEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_DELETETEST.fields_by_name[ + "precondition" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION +) +_DELETETEST.fields_by_name[ + "request" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST +) +_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH +_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE +_QUERYTEST.fields_by_name[ + "query" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +) +_CLAUSE.fields_by_name["select"].message_type = _SELECT +_CLAUSE.fields_by_name["where"].message_type = _WHERE +_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY +_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR +_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR +_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR +_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"]) +_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"]) +_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"]) +_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"]) +_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] 
+_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"]) +_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"]) +_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"]) +_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[ + "clause" +] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"]) +_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"]) +_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"] +_SELECT.fields_by_name["fields"].message_type = _FIELDPATH +_WHERE.fields_by_name["path"].message_type = _FIELDPATH +_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH +_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT +_LISTENTEST.fields_by_name[ + "responses" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE +) +_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT +_SNAPSHOT.fields_by_name[ + "docs" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT +) +_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE +_SNAPSHOT.fields_by_name[ + "read_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND +_DOCCHANGE.fields_by_name[ + "doc" +].message_type = ( + google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT +) +_DOCCHANGE_KIND.containing_type = _DOCCHANGE +DESCRIPTOR.message_types_by_name["TestSuite"] = _TESTSUITE +DESCRIPTOR.message_types_by_name["Test"] = _TEST +DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST +DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST +DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST +DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST +DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST +DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST +DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION +DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST +DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE +DESCRIPTOR.message_types_by_name["Select"] = _SELECT +DESCRIPTOR.message_types_by_name["Where"] = _WHERE +DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY +DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR +DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT +DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH +DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST +DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT +DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +TestSuite = _reflection.GeneratedProtocolMessageType( + "TestSuite", + (_message.Message,), + dict( + DESCRIPTOR=_TESTSUITE, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.TestSuite) + ), +) +_sym_db.RegisterMessage(TestSuite) + +Test = _reflection.GeneratedProtocolMessageType( + "Test", + (_message.Message,), + dict( + DESCRIPTOR=_TEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Test) + ), +) 
+_sym_db.RegisterMessage(Test) + +GetTest = _reflection.GeneratedProtocolMessageType( + "GetTest", + (_message.Message,), + dict( + DESCRIPTOR=_GETTEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.GetTest) + ), +) +_sym_db.RegisterMessage(GetTest) + +CreateTest = _reflection.GeneratedProtocolMessageType( + "CreateTest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATETEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.CreateTest) + ), +) +_sym_db.RegisterMessage(CreateTest) + +SetTest = _reflection.GeneratedProtocolMessageType( + "SetTest", + (_message.Message,), + dict( + DESCRIPTOR=_SETTEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.SetTest) + ), +) +_sym_db.RegisterMessage(SetTest) + +UpdateTest = _reflection.GeneratedProtocolMessageType( + "UpdateTest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATETEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.UpdateTest) + ), +) +_sym_db.RegisterMessage(UpdateTest) + +UpdatePathsTest = _reflection.GeneratedProtocolMessageType( + "UpdatePathsTest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEPATHSTEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.UpdatePathsTest) + ), +) +_sym_db.RegisterMessage(UpdatePathsTest) + +DeleteTest = _reflection.GeneratedProtocolMessageType( + "DeleteTest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETETEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.DeleteTest) + ), +) +_sym_db.RegisterMessage(DeleteTest) + +SetOption = _reflection.GeneratedProtocolMessageType( + "SetOption", + (_message.Message,), + dict( + DESCRIPTOR=_SETOPTION, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.SetOption) + ), +) +_sym_db.RegisterMessage(SetOption) + +QueryTest = _reflection.GeneratedProtocolMessageType( + "QueryTest", + (_message.Message,), + dict( + DESCRIPTOR=_QUERYTEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.QueryTest) + ), +) +_sym_db.RegisterMessage(QueryTest) + +Clause = _reflection.GeneratedProtocolMessageType( + "Clause", + (_message.Message,), + dict( + DESCRIPTOR=_CLAUSE, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Clause) + ), +) +_sym_db.RegisterMessage(Clause) + +Select = _reflection.GeneratedProtocolMessageType( + "Select", + (_message.Message,), + dict( + DESCRIPTOR=_SELECT, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Select) + ), +) +_sym_db.RegisterMessage(Select) + +Where = _reflection.GeneratedProtocolMessageType( + "Where", + (_message.Message,), + dict( + DESCRIPTOR=_WHERE, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Where) + ), +) +_sym_db.RegisterMessage(Where) + +OrderBy = _reflection.GeneratedProtocolMessageType( + "OrderBy", + (_message.Message,), + dict( + DESCRIPTOR=_ORDERBY, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.OrderBy) + ), +) +_sym_db.RegisterMessage(OrderBy) + +Cursor = _reflection.GeneratedProtocolMessageType( + "Cursor", + (_message.Message,), + dict( + DESCRIPTOR=_CURSOR, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Cursor) + ), +) +_sym_db.RegisterMessage(Cursor) + +DocSnapshot = _reflection.GeneratedProtocolMessageType( + "DocSnapshot", + (_message.Message,), + dict( + DESCRIPTOR=_DOCSNAPSHOT, + __module__="test_v1_pb2" + # 
@@protoc_insertion_point(class_scope:tests.v1.DocSnapshot) + ), +) +_sym_db.RegisterMessage(DocSnapshot) + +FieldPath = _reflection.GeneratedProtocolMessageType( + "FieldPath", + (_message.Message,), + dict( + DESCRIPTOR=_FIELDPATH, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.FieldPath) + ), +) +_sym_db.RegisterMessage(FieldPath) + +ListenTest = _reflection.GeneratedProtocolMessageType( + "ListenTest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTENTEST, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.ListenTest) + ), +) +_sym_db.RegisterMessage(ListenTest) + +Snapshot = _reflection.GeneratedProtocolMessageType( + "Snapshot", + (_message.Message,), + dict( + DESCRIPTOR=_SNAPSHOT, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.Snapshot) + ), +) +_sym_db.RegisterMessage(Snapshot) + +DocChange = _reflection.GeneratedProtocolMessageType( + "DocChange", + (_message.Message,), + dict( + DESCRIPTOR=_DOCCHANGE, + __module__="test_v1_pb2" + # @@protoc_insertion_point(class_scope:tests.v1.DocChange) + ), +) +_sym_db.RegisterMessage(DocChange) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + '\n&com.google.cloud.firestore.conformance\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance' + ), +) +# @@protoc_insertion_point(module_scope) diff --git a/firestore/google/cloud/firestore_v1/query.py b/firestore/google/cloud/firestore_v1/query.py new file mode 100644 index 000000000000..6c6239989e8f --- /dev/null +++ b/firestore/google/cloud/firestore_v1/query.py @@ -0,0 +1,970 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Classes for representing queries for the Google Cloud Firestore API. + +A :class:`~.firestore_v1.query.Query` can be created directly from +a :class:`~.firestore_v1.collection.Collection` and that can be +a more common way to create a query than direct usage of the constructor. +""" +import copy +import math +import warnings + +from google.protobuf import wrappers_pb2 +import six + +from google.cloud.firestore_v1 import _helpers +from google.cloud.firestore_v1 import document +from google.cloud.firestore_v1 import field_path as field_path_module +from google.cloud.firestore_v1 import transforms +from google.cloud.firestore_v1.gapic import enums +from google.cloud.firestore_v1.proto import query_pb2 +from google.cloud.firestore_v1.order import Order +from google.cloud.firestore_v1.watch import Watch + +_EQ_OP = "==" +_operator_enum = enums.StructuredQuery.FieldFilter.Operator +_COMPARISON_OPERATORS = { + "<": _operator_enum.LESS_THAN, + "<=": _operator_enum.LESS_THAN_OR_EQUAL, + _EQ_OP: _operator_enum.EQUAL, + ">=": _operator_enum.GREATER_THAN_OR_EQUAL, + ">": _operator_enum.GREATER_THAN, + "array_contains": _operator_enum.ARRAY_CONTAINS, +} +_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}." 
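+# NOTE: ``Query.where`` resolves its ``op_string`` argument against
+# ``_COMPARISON_OPERATORS`` (via the ``_enum_from_op_string`` helper used
+# below); operator strings outside that mapping are rejected with
+# ``_BAD_OP_STRING``.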
+_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values' +_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values." +_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}." +_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values." +_MISSING_ORDER_BY = ( + 'The "order by" field path {!r} is not present in the cursor data {!r}. ' + "All fields sent to ``order_by()`` must be present in the fields " + "if passed to one of ``start_at()`` / ``start_after()`` / " + "``end_before()`` / ``end_at()`` to define a cursor." +) +_NO_ORDERS_FOR_CURSOR = ( + "Attempting to create a cursor with no fields to order on. " + "When defining a cursor with one of ``start_at()`` / ``start_after()`` / " + "``end_before()`` / ``end_at()``, all fields in the cursor must " + "come from fields set in ``order_by()``." +) +_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}." + + +class Query(object): + """Represents a query to the Firestore API. + + Instances of this class are considered immutable: all methods that + would modify an instance instead return a new instance. + + Args: + parent (~.firestore_v1.collection.Collection): The collection + that this query applies to. + projection (Optional[google.cloud.proto.firestore.v1.\ + query_pb2.StructuredQuery.Projection]): A projection of document + fields to limit the query results to. + field_filters (Optional[Tuple[google.cloud.proto.firestore.v1.\ + query_pb2.StructuredQuery.FieldFilter, ...]]): The filters to be + applied in the query. + orders (Optional[Tuple[google.cloud.proto.firestore.v1.\ + query_pb2.StructuredQuery.Order, ...]]): The "order by" entries + to use in the query. + limit (Optional[int]): The maximum number of documents the + query is allowed to return. + offset (Optional[int]): The number of results to skip. + start_at (Optional[Tuple[dict, bool]]): Two-tuple of + + * a mapping of fields. Any field that is present in this mapping + must also be present in ``orders`` + * an ``after`` flag + + The fields and the flag combine to form a cursor used as + a starting point in a query result set. If the ``after`` + flag is :data:`True`, the results will start just after any + documents which have fields matching the cursor, otherwise + any matching documents will be included in the result set. + When the query is formed, the document values + will be used in the order given by ``orders``. + end_at (Optional[Tuple[dict, bool]]): Two-tuple of + + * a mapping of fields. Any field that is present in this mapping + must also be present in ``orders`` + * a ``before`` flag + + The fields and the flag combine to form a cursor used as + an ending point in a query result set. If the ``before`` + flag is :data:`True`, the results will end just before any + documents which have fields matching the cursor, otherwise + any matching documents will be included in the result set. + When the query is formed, the document values + will be used in the order given by ``orders``. 
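+
+    Example:
+        Instances are normally built by chaining the methods below rather
+        than by calling this constructor directly. A minimal sketch, where
+        ``base_query`` is an existing :class:`Query` over city documents
+        (the field name is illustrative)::
+
+            narrowed = (
+                base_query.where("population", ">", 1000000)
+                .order_by("population", direction=Query.DESCENDING)
+                .limit(10)
+            )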
+ """ + + ASCENDING = "ASCENDING" + """str: Sort query results in ascending order on a field.""" + DESCENDING = "DESCENDING" + """str: Sort query results in descending order on a field.""" + + def __init__( + self, + parent, + projection=None, + field_filters=(), + orders=(), + limit=None, + offset=None, + start_at=None, + end_at=None, + ): + self._parent = parent + self._projection = projection + self._field_filters = field_filters + self._orders = orders + self._limit = limit + self._offset = offset + self._start_at = start_at + self._end_at = end_at + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return ( + self._parent == other._parent + and self._projection == other._projection + and self._field_filters == other._field_filters + and self._orders == other._orders + and self._limit == other._limit + and self._offset == other._offset + and self._start_at == other._start_at + and self._end_at == other._end_at + ) + + @property + def _client(self): + """The client of the parent collection. + + Returns: + ~.firestore_v1.client.Client: The client that owns + this query. + """ + return self._parent._client + + def select(self, field_paths): + """Project documents matching query to a limited set of fields. + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + If the current query already has a projection set (i.e. has already + called :meth:`~.firestore_v1.query.Query.select`), this + will overwrite it. + + Args: + field_paths (Iterable[str, ...]): An iterable of field paths + (``.``-delimited list of field names) to use as a projection + of document fields in the query results. + + Returns: + ~.firestore_v1.query.Query: A "projected" query. Acts as + a copy of the current query, modified with the newly added + projection. + Raises: + ValueError: If any ``field_path`` is invalid. + """ + field_paths = list(field_paths) + for field_path in field_paths: + field_path_module.split_field_path(field_path) # raises + + new_projection = query_pb2.StructuredQuery.Projection( + fields=[ + query_pb2.StructuredQuery.FieldReference(field_path=field_path) + for field_path in field_paths + ] + ) + return self.__class__( + self._parent, + projection=new_projection, + field_filters=self._field_filters, + orders=self._orders, + limit=self._limit, + offset=self._offset, + start_at=self._start_at, + end_at=self._end_at, + ) + + def where(self, field_path, op_string, value): + """Filter the query on a field. + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + Returns a new :class:`~.firestore_v1.query.Query` that + filters on a specific field path, according to an operation (e.g. + ``==`` or "equals") and a particular value to be paired with that + operation. + + Args: + field_path (str): A field path (``.``-delimited list of + field names) for the field to filter on. + op_string (str): A comparison operation in the form of a string. + Acceptable values are ``<``, ``<=``, ``==``, ``>=`` + and ``>``. + value (Any): The value to compare the field against in the filter. + If ``value`` is :data:`None` or a NaN, then ``==`` is the only + allowed operation. + + Returns: + ~.firestore_v1.query.Query: A filtered query. Acts as a + copy of the current query, modified with the newly added filter. + + Raises: + ValueError: If ``field_path`` is invalid. + ValueError: If ``value`` is a NaN or :data:`None` and + ``op_string`` is not ``==``. 
+ """ + field_path_module.split_field_path(field_path) # raises + + if value is None: + if op_string != _EQ_OP: + raise ValueError(_BAD_OP_NAN_NULL) + filter_pb = query_pb2.StructuredQuery.UnaryFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL, + ) + elif _isnan(value): + if op_string != _EQ_OP: + raise ValueError(_BAD_OP_NAN_NULL) + filter_pb = query_pb2.StructuredQuery.UnaryFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + op=enums.StructuredQuery.UnaryFilter.Operator.IS_NAN, + ) + elif isinstance(value, (transforms.Sentinel, transforms._ValueList)): + raise ValueError(_INVALID_WHERE_TRANSFORM) + else: + filter_pb = query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + op=_enum_from_op_string(op_string), + value=_helpers.encode_value(value), + ) + + new_filters = self._field_filters + (filter_pb,) + return self.__class__( + self._parent, + projection=self._projection, + field_filters=new_filters, + orders=self._orders, + limit=self._limit, + offset=self._offset, + start_at=self._start_at, + end_at=self._end_at, + ) + + @staticmethod + def _make_order(field_path, direction): + """Helper for :meth:`order_by`.""" + return query_pb2.StructuredQuery.Order( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + direction=_enum_from_direction(direction), + ) + + def order_by(self, field_path, direction=ASCENDING): + """Modify the query to add an order clause on a specific field. + + See :meth:`~.firestore_v1.client.Client.field_path` for + more information on **field paths**. + + Successive :meth:`~.firestore_v1.query.Query.order_by` calls + will further refine the ordering of results returned by the query + (i.e. the new "order by" fields will be added to existing ones). + + Args: + field_path (str): A field path (``.``-delimited list of + field names) on which to order the query results. + direction (Optional[str]): The direction to order by. Must be one + of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to + :attr:`ASCENDING`. + + Returns: + ~.firestore_v1.query.Query: An ordered query. Acts as a + copy of the current query, modified with the newly added + "order by" constraint. + + Raises: + ValueError: If ``field_path`` is invalid. + ValueError: If ``direction`` is not one of :attr:`ASCENDING` or + :attr:`DESCENDING`. + """ + field_path_module.split_field_path(field_path) # raises + + order_pb = self._make_order(field_path, direction) + + new_orders = self._orders + (order_pb,) + return self.__class__( + self._parent, + projection=self._projection, + field_filters=self._field_filters, + orders=new_orders, + limit=self._limit, + offset=self._offset, + start_at=self._start_at, + end_at=self._end_at, + ) + + def limit(self, count): + """Limit a query to return a fixed number of results. + + If the current query already has a limit set, this will overwrite it. + + Args: + count (int): Maximum number of documents to return that match + the query. + + Returns: + ~.firestore_v1.query.Query: A limited query. Acts as a + copy of the current query, modified with the newly added + "limit" filter. + """ + return self.__class__( + self._parent, + projection=self._projection, + field_filters=self._field_filters, + orders=self._orders, + limit=count, + offset=self._offset, + start_at=self._start_at, + end_at=self._end_at, + ) + + def offset(self, num_to_skip): + """Skip to an offset in a query. 
+ + If the current query already has specified an offset, this will + overwrite it. + + Args: + num_to_skip (int): The number of results to skip at the beginning + of query results. (Must be non-negative.) + + Returns: + ~.firestore_v1.query.Query: An offset query. Acts as a + copy of the current query, modified with the newly added + "offset" field. + """ + return self.__class__( + self._parent, + projection=self._projection, + field_filters=self._field_filters, + orders=self._orders, + limit=self._limit, + offset=num_to_skip, + start_at=self._start_at, + end_at=self._end_at, + ) + + def _cursor_helper(self, document_fields, before, start): + """Set values to be used for a ``start_at`` or ``end_at`` cursor. + + The values will later be used in a query protobuf. + + When the query is sent to the server, the ``document_fields`` will + be used in the order given by fields set by + :meth:`~.firestore_v1.query.Query.order_by`. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + before (bool): Flag indicating if the document in + ``document_fields`` should (:data:`False`) or + shouldn't (:data:`True`) be included in the result set. + start (Optional[bool]): determines if the cursor is a ``start_at`` + cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`). + + Returns: + ~.firestore_v1.query.Query: A query with cursor. Acts as + a copy of the current query, modified with the newly added + "start at" cursor. + """ + if isinstance(document_fields, tuple): + document_fields = list(document_fields) + elif isinstance(document_fields, document.DocumentSnapshot): + if document_fields.reference._path[:-1] != self._parent._path: + raise ValueError( + "Cannot use snapshot from another collection as a cursor." + ) + else: + # NOTE: We copy so that the caller can't modify after calling. + document_fields = copy.deepcopy(document_fields) + + cursor_pair = document_fields, before + query_kwargs = { + "projection": self._projection, + "field_filters": self._field_filters, + "orders": self._orders, + "limit": self._limit, + "offset": self._offset, + } + if start: + query_kwargs["start_at"] = cursor_pair + query_kwargs["end_at"] = self._end_at + else: + query_kwargs["start_at"] = self._start_at + query_kwargs["end_at"] = cursor_pair + + return self.__class__(self._parent, **query_kwargs) + + def start_at(self, document_fields): + """Start query results at a particular document value. + + The result set will **include** the document specified by + ``document_fields``. + + If the current query already has specified a start cursor -- either + via this method or + :meth:`~.firestore_v1.query.Query.start_after` -- this will + overwrite it. + + When the query is sent to the server, the ``document_fields`` will + be used in the order given by fields set by + :meth:`~.firestore_v1.query.Query.order_by`. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. Acts as + a copy of the current query, modified with the newly added + "start at" cursor. 
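+
+        Example:
+            # Illustrative only; the field name is hypothetical. The cursor
+            # fields must correspond to fields set via ``order_by()``:
+            query = query.order_by(u'age').start_at({u'age': 30})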
+ """ + return self._cursor_helper(document_fields, before=True, start=True) + + def start_after(self, document_fields): + """Start query results after a particular document value. + + The result set will **exclude** the document specified by + ``document_fields``. + + If the current query already has specified a start cursor -- either + via this method or + :meth:`~.firestore_v1.query.Query.start_at` -- this will + overwrite it. + + When the query is sent to the server, the ``document_fields`` will + be used in the order given by fields set by + :meth:`~.firestore_v1.query.Query.order_by`. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. Acts as + a copy of the current query, modified with the newly added + "start after" cursor. + """ + return self._cursor_helper(document_fields, before=False, start=True) + + def end_before(self, document_fields): + """End query results before a particular document value. + + The result set will **exclude** the document specified by + ``document_fields``. + + If the current query already has specified an end cursor -- either + via this method or + :meth:`~.firestore_v1.query.Query.end_at` -- this will + overwrite it. + + When the query is sent to the server, the ``document_fields`` will + be used in the order given by fields set by + :meth:`~.firestore_v1.query.Query.order_by`. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. Acts as + a copy of the current query, modified with the newly added + "end before" cursor. + """ + return self._cursor_helper(document_fields, before=True, start=False) + + def end_at(self, document_fields): + """End query results at a particular document value. + + The result set will **include** the document specified by + ``document_fields``. + + If the current query already has specified an end cursor -- either + via this method or + :meth:`~.firestore_v1.query.Query.end_before` -- this will + overwrite it. + + When the query is sent to the server, the ``document_fields`` will + be used in the order given by fields set by + :meth:`~.firestore_v1.query.Query.order_by`. + + Args: + document_fields (Union[~.firestore_v1.\ + document.DocumentSnapshot, dict, list, tuple]): a document + snapshot or a dictionary/list/tuple of fields representing a + query results cursor. A cursor is a collection of values that + represent a position in a query result set. + + Returns: + ~.firestore_v1.query.Query: A query with cursor. Acts as + a copy of the current query, modified with the newly added + "end at" cursor. + """ + return self._cursor_helper(document_fields, before=False, start=False) + + def _filters_pb(self): + """Convert all the filters into a single generic Filter protobuf. + + This may be a lone field filter or unary filter, may be a composite + filter or may be :data:`None`. + + Returns: + google.cloud.firestore_v1.types.\ + StructuredQuery.Filter: A "generic" filter representing the + current query's filters. 
+ """ + num_filters = len(self._field_filters) + if num_filters == 0: + return None + elif num_filters == 1: + return _filter_pb(self._field_filters[0]) + else: + composite_filter = query_pb2.StructuredQuery.CompositeFilter( + op=enums.StructuredQuery.CompositeFilter.Operator.AND, + filters=[_filter_pb(filter_) for filter_ in self._field_filters], + ) + return query_pb2.StructuredQuery.Filter(composite_filter=composite_filter) + + @staticmethod + def _normalize_projection(projection): + """Helper: convert field paths to message.""" + if projection is not None: + + fields = list(projection.fields) + + if not fields: + field_ref = query_pb2.StructuredQuery.FieldReference( + field_path="__name__" + ) + return query_pb2.StructuredQuery.Projection(fields=[field_ref]) + + return projection + + def _normalize_orders(self): + """Helper: adjust orders based on cursors, where clauses.""" + orders = list(self._orders) + _has_snapshot_cursor = False + + if self._start_at: + if isinstance(self._start_at[0], document.DocumentSnapshot): + _has_snapshot_cursor = True + + if self._end_at: + if isinstance(self._end_at[0], document.DocumentSnapshot): + _has_snapshot_cursor = True + + if _has_snapshot_cursor: + should_order = [ + _enum_from_op_string(key) + for key in _COMPARISON_OPERATORS + if key not in (_EQ_OP, "array_contains") + ] + order_keys = [order.field.field_path for order in orders] + for filter_ in self._field_filters: + field = filter_.field.field_path + if filter_.op in should_order and field not in order_keys: + orders.append(self._make_order(field, "ASCENDING")) + if not orders: + orders.append(self._make_order("__name__", "ASCENDING")) + else: + order_keys = [order.field.field_path for order in orders] + if "__name__" not in order_keys: + direction = orders[-1].direction # enum? + orders.append(self._make_order("__name__", direction)) + + return orders + + def _normalize_cursor(self, cursor, orders): + """Helper: convert cursor to a list of values based on orders.""" + if cursor is None: + return + + if not orders: + raise ValueError(_NO_ORDERS_FOR_CURSOR) + + document_fields, before = cursor + + order_keys = [order.field.field_path for order in orders] + + if isinstance(document_fields, document.DocumentSnapshot): + snapshot = document_fields + document_fields = snapshot.to_dict() + document_fields["__name__"] = snapshot.reference + + if isinstance(document_fields, dict): + # Transform to list using orders + values = [] + data = document_fields + for order_key in order_keys: + try: + values.append(field_path_module.get_nested_value(order_key, data)) + except KeyError: + msg = _MISSING_ORDER_BY.format(order_key, data) + raise ValueError(msg) + document_fields = values + + if len(document_fields) != len(orders): + msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys) + raise ValueError(msg) + + _transform_bases = (transforms.Sentinel, transforms._ValueList) + + for index, key_field in enumerate(zip(order_keys, document_fields)): + key, field = key_field + + if isinstance(field, _transform_bases): + msg = _INVALID_CURSOR_TRANSFORM + raise ValueError(msg) + + if key == "__name__" and isinstance(field, six.string_types): + document_fields[index] = self._parent.document(field) + + return document_fields, before + + def _to_protobuf(self): + """Convert the current query into the equivalent protobuf. + + Returns: + google.cloud.firestore_v1.types.StructuredQuery: The + query protobuf. 
+ """ + projection = self._normalize_projection(self._projection) + orders = self._normalize_orders() + start_at = self._normalize_cursor(self._start_at, orders) + end_at = self._normalize_cursor(self._end_at, orders) + + query_kwargs = { + "select": projection, + "from": [ + query_pb2.StructuredQuery.CollectionSelector( + collection_id=self._parent.id + ) + ], + "where": self._filters_pb(), + "order_by": orders, + "start_at": _cursor_pb(start_at), + "end_at": _cursor_pb(end_at), + } + if self._offset is not None: + query_kwargs["offset"] = self._offset + if self._limit is not None: + query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit) + + return query_pb2.StructuredQuery(**query_kwargs) + + def get(self, transaction=None): + """Deprecated alias for :meth:`stream`.""" + warnings.warn( + "'Query.get' is deprecated: please use 'Query.stream' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.stream(transaction=transaction) + + def stream(self, transaction=None): + """Read the documents in the collection that match this query. + + This sends a ``RunQuery`` RPC and then returns an iterator which + consumes each document returned in the stream of ``RunQueryResponse`` + messages. + + .. note:: + + The underlying stream of responses will time out after + the ``max_rpc_timeout_millis`` value set in the GAPIC + client configuration for the ``RunQuery`` API. Snapshots + not consumed from the iterator before that point will be lost. + + If a ``transaction`` is used and it already has write operations + added, this method cannot be used (i.e. read-after-write is not + allowed). + + Args: + transaction (Optional[~.firestore_v1.transaction.\ + Transaction]): An existing transaction that this query will + run in. + + Yields: + ~.firestore_v1.document.DocumentSnapshot: The next + document that fulfills the query. + """ + parent_path, expected_prefix = self._parent._parent_info() + response_iterator = self._client._firestore_api.run_query( + parent_path, + self._to_protobuf(), + transaction=_helpers.get_transaction_id(transaction), + metadata=self._client._rpc_metadata, + ) + + for response in response_iterator: + snapshot = _query_response_to_snapshot( + response, self._parent, expected_prefix + ) + if snapshot is not None: + yield snapshot + + def on_snapshot(self, callback): + """Monitor the documents in this collection that match this query. + + This starts a watch on this query using a background thread. The + provided callback is run on the snapshot of the documents. + + Args: + callback(~.firestore.query.QuerySnapshot): a callback to run when + a change occurs. + + Example: + from google.cloud import firestore_v1 + + db = firestore_v1.Client() + query_ref = db.collection(u'users').where("user", "==", u'Ada') + + def on_snapshot(docs, changes, read_time): + for doc in docs: + print(u'{} => {}'.format(doc.id, doc.to_dict())) + + # Watch this query + query_watch = query_ref.on_snapshot(on_snapshot) + + # Terminate this watch + query_watch.unsubscribe() + """ + return Watch.for_query( + self, callback, document.DocumentSnapshot, document.DocumentReference + ) + + def _comparator(self, doc1, doc2): + _orders = self._orders + + # Add implicit sorting by name, using the last specified direction. 
+ if len(_orders) == 0: + lastDirection = Query.ASCENDING + else: + if _orders[-1].direction == 1: + lastDirection = Query.ASCENDING + else: + lastDirection = Query.DESCENDING + + orderBys = list(_orders) + + order_pb = query_pb2.StructuredQuery.Order( + field=query_pb2.StructuredQuery.FieldReference(field_path="id"), + direction=_enum_from_direction(lastDirection), + ) + orderBys.append(order_pb) + + for orderBy in orderBys: + if orderBy.field.field_path == "id": + # If ordering by docuent id, compare resource paths. + comp = Order()._compare_to(doc1.reference._path, doc2.reference._path) + else: + if ( + orderBy.field.field_path not in doc1._data + or orderBy.field.field_path not in doc2._data + ): + raise ValueError( + "Can only compare fields that exist in the " + "DocumentSnapshot. Please include the fields you are " + "ordering on in your select() call." + ) + v1 = doc1._data[orderBy.field.field_path] + v2 = doc2._data[orderBy.field.field_path] + encoded_v1 = _helpers.encode_value(v1) + encoded_v2 = _helpers.encode_value(v2) + comp = Order().compare(encoded_v1, encoded_v2) + + if comp != 0: + # 1 == Ascending, -1 == Descending + return orderBy.direction * comp + + return 0 + + +def _enum_from_op_string(op_string): + """Convert a string representation of a binary operator to an enum. + + These enums come from the protobuf message definition + ``StructuredQuery.FieldFilter.Operator``. + + Args: + op_string (str): A comparison operation in the form of a string. + Acceptable values are ``<``, ``<=``, ``==``, ``>=`` + and ``>``. + + Returns: + int: The enum corresponding to ``op_string``. + + Raises: + ValueError: If ``op_string`` is not a valid operator. + """ + try: + return _COMPARISON_OPERATORS[op_string] + except KeyError: + choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys())) + msg = _BAD_OP_STRING.format(op_string, choices) + raise ValueError(msg) + + +def _isnan(value): + """Check if a value is NaN. + + This differs from ``math.isnan`` in that **any** input type is + allowed. + + Args: + value (Any): A value to check for NaN-ness. + + Returns: + bool: Indicates if the value is the NaN float. + """ + if isinstance(value, float): + return math.isnan(value) + else: + return False + + +def _enum_from_direction(direction): + """Convert a string representation of a direction to an enum. + + Args: + direction (str): A direction to order by. Must be one of + :attr:`~.firestore.Query.ASCENDING` or + :attr:`~.firestore.Query.DESCENDING`. + + Returns: + int: The enum corresponding to ``direction``. + + Raises: + ValueError: If ``direction`` is not a valid direction. + """ + if isinstance(direction, int): + return direction + + if direction == Query.ASCENDING: + return enums.StructuredQuery.Direction.ASCENDING + elif direction == Query.DESCENDING: + return enums.StructuredQuery.Direction.DESCENDING + else: + msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING) + raise ValueError(msg) + + +def _filter_pb(field_or_unary): + """Convert a specific protobuf filter to the generic filter type. + + Args: + field_or_unary (Union[google.cloud.proto.firestore.v1.\ + query_pb2.StructuredQuery.FieldFilter, google.cloud.proto.\ + firestore.v1.query_pb2.StructuredQuery.FieldFilter]): A + field or unary filter to convert to a generic filter. + + Returns: + google.cloud.firestore_v1.types.\ + StructuredQuery.Filter: A "generic" filter. + + Raises: + ValueError: If ``field_or_unary`` is not a field or unary filter. 
+ """ + if isinstance(field_or_unary, query_pb2.StructuredQuery.FieldFilter): + return query_pb2.StructuredQuery.Filter(field_filter=field_or_unary) + elif isinstance(field_or_unary, query_pb2.StructuredQuery.UnaryFilter): + return query_pb2.StructuredQuery.Filter(unary_filter=field_or_unary) + else: + raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary) + + +def _cursor_pb(cursor_pair): + """Convert a cursor pair to a protobuf. + + If ``cursor_pair`` is :data:`None`, just returns :data:`None`. + + Args: + cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of + + * a list of field values. + * a ``before`` flag + + Returns: + Optional[google.cloud.firestore_v1.types.Cursor]: A + protobuf cursor corresponding to the values. + """ + if cursor_pair is not None: + data, before = cursor_pair + value_pbs = [_helpers.encode_value(value) for value in data] + return query_pb2.Cursor(values=value_pbs, before=before) + + +def _query_response_to_snapshot(response_pb, collection, expected_prefix): + """Parse a query response protobuf to a document snapshot. + + Args: + response_pb (google.cloud.proto.firestore.v1.\ + firestore_pb2.RunQueryResponse): A + collection (~.firestore_v1.collection.CollectionReference): A + reference to the collection that initiated the query. + expected_prefix (str): The expected prefix for fully-qualified + document names returned in the query results. This can be computed + directly from ``collection`` via :meth:`_parent_info`. + + Returns: + Optional[~.firestore.document.DocumentSnapshot]: A + snapshot of the data returned in the query. If ``response_pb.document`` + is not set, the snapshot will be :data:`None`. + """ + if not response_pb.HasField("document"): + return None + + document_id = _helpers.get_doc_id(response_pb.document, expected_prefix) + reference = collection.document(document_id) + data = _helpers.decode_dict(response_pb.document.fields, collection._client) + snapshot = document.DocumentSnapshot( + reference, + data, + exists=True, + read_time=response_pb.read_time, + create_time=response_pb.document.create_time, + update_time=response_pb.document.update_time, + ) + return snapshot diff --git a/firestore/google/cloud/firestore_v1/transaction.py b/firestore/google/cloud/firestore_v1/transaction.py new file mode 100644 index 000000000000..5570e38b8305 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/transaction.py @@ -0,0 +1,409 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for applying Google Cloud Firestore changes in a transaction.""" + + +import random +import time + +import six + +from google.api_core import exceptions +from google.cloud.firestore_v1 import batch +from google.cloud.firestore_v1 import types + + +MAX_ATTEMPTS = 5 +"""int: Default number of transaction attempts (with retries).""" +_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}." 
+_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}." +_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back") +_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed") +_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction." +_INITIAL_SLEEP = 1.0 +"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`.""" +_MAX_SLEEP = 30.0 +"""float: Eventual "max" sleep time. To be used in :func:`_sleep`.""" +_MULTIPLIER = 2.0 +"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`.""" +_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts." +_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried." + + +class Transaction(batch.WriteBatch): + """Accumulate read-and-write operations to be sent in a transaction. + + Args: + client (~.firestore_v1.client.Client): The client that + created this transaction. + max_attempts (Optional[int]): The maximum number of attempts for + the transaction (i.e. allowing retries). Defaults to + :attr:`~.firestore_v1.transaction.MAX_ATTEMPTS`. + read_only (Optional[bool]): Flag indicating if the transaction + should be read-only or should allow writes. Defaults to + :data:`False`. + """ + + def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False): + super(Transaction, self).__init__(client) + self._max_attempts = max_attempts + self._read_only = read_only + self._id = None + + def _add_write_pbs(self, write_pbs): + """Add `Write`` protobufs to this transaction. + + Args: + write_pbs (List[google.cloud.proto.firestore.v1.\ + write_pb2.Write]): A list of write protobufs to be added. + + Raises: + ValueError: If this transaction is read-only. + """ + if self._read_only: + raise ValueError(_WRITE_READ_ONLY) + + super(Transaction, self)._add_write_pbs(write_pbs) + + def _options_protobuf(self, retry_id): + """Convert the current object to protobuf. + + The ``retry_id`` value is used when retrying a transaction that + failed (e.g. due to contention). It is intended to be the "first" + transaction that failed (i.e. if multiple retries are needed). + + Args: + retry_id (Union[bytes, NoneType]): Transaction ID of a transaction + to be retried. + + Returns: + Optional[google.cloud.firestore_v1.types.TransactionOptions]: + The protobuf ``TransactionOptions`` if ``read_only==True`` or if + there is a transaction ID to be retried, else :data:`None`. + + Raises: + ValueError: If ``retry_id`` is not :data:`None` but the + transaction is read-only. + """ + if retry_id is not None: + if self._read_only: + raise ValueError(_CANT_RETRY_READ_ONLY) + + return types.TransactionOptions( + read_write=types.TransactionOptions.ReadWrite( + retry_transaction=retry_id + ) + ) + elif self._read_only: + return types.TransactionOptions( + read_only=types.TransactionOptions.ReadOnly() + ) + else: + return None + + @property + def in_progress(self): + """Determine if this transaction has already begun. + + Returns: + bool: Indicates if the transaction has started. + """ + return self._id is not None + + @property + def id(self): + """Get the current transaction ID. + + Returns: + Optional[bytes]: The transaction ID (or :data:`None` if the + current transaction is not in progress). + """ + return self._id + + def _begin(self, retry_id=None): + """Begin the transaction. + + Args: + retry_id (Optional[bytes]): Transaction ID of a transaction to be + retried. + + Raises: + ValueError: If the current transaction has already begun. 
+ """ + if self.in_progress: + msg = _CANT_BEGIN.format(self._id) + raise ValueError(msg) + + transaction_response = self._client._firestore_api.begin_transaction( + self._client._database_string, + options_=self._options_protobuf(retry_id), + metadata=self._client._rpc_metadata, + ) + self._id = transaction_response.transaction + + def _clean_up(self): + """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``. + + This intended to occur on success or failure of the associated RPCs. + """ + self._write_pbs = [] + self._id = None + + def _rollback(self): + """Roll back the transaction. + + Raises: + ValueError: If no transaction is in progress. + """ + if not self.in_progress: + raise ValueError(_CANT_ROLLBACK) + + try: + # NOTE: The response is just ``google.protobuf.Empty``. + self._client._firestore_api.rollback( + self._client._database_string, + self._id, + metadata=self._client._rpc_metadata, + ) + finally: + self._clean_up() + + def _commit(self): + """Transactionally commit the changes accumulated. + + Returns: + List[google.cloud.proto.firestore.v1.\ + write_pb2.WriteResult, ...]: The write results corresponding + to the changes committed, returned in the same order as the + changes were applied to this transaction. A write result contains + an ``update_time`` field. + + Raises: + ValueError: If no transaction is in progress. + """ + if not self.in_progress: + raise ValueError(_CANT_COMMIT) + + commit_response = _commit_with_retry(self._client, self._write_pbs, self._id) + + self._clean_up() + return list(commit_response.write_results) + + +class _Transactional(object): + """Provide a callable object to use as a transactional decorater. + + This is surfaced via + :func:`~.firestore_v1.transaction.transactional`. + + Args: + to_wrap (Callable[~.firestore_v1.transaction.Transaction, \ + Any]): A callable that should be run (and retried) in a + transaction. + """ + + def __init__(self, to_wrap): + self.to_wrap = to_wrap + self.current_id = None + """Optional[bytes]: The current transaction ID.""" + self.retry_id = None + """Optional[bytes]: The ID of the first attempted transaction.""" + + def _reset(self): + """Unset the transaction IDs.""" + self.current_id = None + self.retry_id = None + + def _pre_commit(self, transaction, *args, **kwargs): + """Begin transaction and call the wrapped callable. + + If the callable raises an exception, the transaction will be rolled + back. If not, the transaction will be "ready" for ``Commit`` (i.e. + it will have staged writes). + + Args: + transaction (~.firestore_v1.transaction.Transaction): A + transaction to execute the callable within. + args (Tuple[Any, ...]): The extra positional arguments to pass + along to the wrapped callable. + kwargs (Dict[str, Any]): The extra keyword arguments to pass + along to the wrapped callable. + + Returns: + Any: result of the wrapped callable. + + Raises: + Exception: Any failure caused by ``to_wrap``. + """ + # Force the ``transaction`` to be not "in progress". + transaction._clean_up() + transaction._begin(retry_id=self.retry_id) + + # Update the stored transaction IDs. + self.current_id = transaction._id + if self.retry_id is None: + self.retry_id = self.current_id + try: + return self.to_wrap(transaction, *args, **kwargs) + except: # noqa + # NOTE: If ``rollback`` fails this will lose the information + # from the original failure. + transaction._rollback() + raise + + def _maybe_commit(self, transaction): + """Try to commit the transaction. 
+ + If the transaction is read-write and the ``Commit`` fails with the + ``ABORTED`` status code, it will be retried. Any other failure will + not be caught. + + Args: + transaction (~.firestore_v1.transaction.Transaction): The + transaction to be ``Commit``-ed. + + Returns: + bool: Indicating if the commit succeeded. + """ + try: + transaction._commit() + return True + except exceptions.GoogleAPICallError as exc: + if transaction._read_only: + raise + + if isinstance(exc, exceptions.Aborted): + # If a read-write transaction returns ABORTED, retry. + return False + else: + raise + + def __call__(self, transaction, *args, **kwargs): + """Execute the wrapped callable within a transaction. + + Args: + transaction (~.firestore_v1.transaction.Transaction): A + transaction to execute the callable within. + args (Tuple[Any, ...]): The extra positional arguments to pass + along to the wrapped callable. + kwargs (Dict[str, Any]): The extra keyword arguments to pass + along to the wrapped callable. + + Returns: + Any: The result of the wrapped callable. + + Raises: + ValueError: If the transaction does not succeed in + ``max_attempts``. + """ + self._reset() + + for attempt in six.moves.xrange(transaction._max_attempts): + result = self._pre_commit(transaction, *args, **kwargs) + succeeded = self._maybe_commit(transaction) + if succeeded: + return result + + # Subsequent requests will use the failed transaction ID as part of + # the ``BeginTransactionRequest`` when restarting this transaction + # (via ``options.retry_transaction``). This preserves the "spot in + # line" of the transaction, so exponential backoff is not required + # in this case. + + transaction._rollback() + msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) + raise ValueError(msg) + + +def transactional(to_wrap): + """Decorate a callable so that it runs in a transaction. + + Args: + to_wrap (Callable[~.firestore_v1.transaction.Transaction, \ + Any]): A callable that should be run (and retried) in a + transaction. + + Returns: + Callable[~.firestore_v1.transaction.Transaction, Any]: the + wrapped callable. + """ + return _Transactional(to_wrap) + + +def _commit_with_retry(client, write_pbs, transaction_id): + """Call ``Commit`` on the GAPIC client with retry / sleep. + + Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level + retry is handled by the underlying GAPICd client, but in this case it + doesn't because ``Commit`` is not always idempotent. But here we know it + is "idempotent"-like because it has a transaction ID. We also need to do + our own retry to special-case the ``INVALID_ARGUMENT`` error. + + Args: + client (~.firestore_v1.client.Client): A client with + GAPIC client and configuration details. + write_pbs (List[google.cloud.proto.firestore.v1.\ + write_pb2.Write, ...]): A ``Write`` protobuf instance to + be committed. + transaction_id (bytes): ID of an existing transaction that + this commit will run in. + + Returns: + google.cloud.firestore_v1.types.CommitResponse: + The protobuf response from ``Commit``. + + Raises: + ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable + exception is encountered. 
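+
+    Example:
+        With the module defaults (``_INITIAL_SLEEP = 1.0``,
+        ``_MULTIPLIER = 2.0``, ``_MAX_SLEEP = 30.0``), the "max" sleep grows
+        as 1.0, 2.0, 4.0, 8.0, ... seconds, capped at 30.0; each actual sleep
+        is a uniform random value between zero and the current max (see
+        :func:`_sleep`).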
+ """ + current_sleep = _INITIAL_SLEEP + while True: + try: + return client._firestore_api.commit( + client._database_string, + write_pbs, + transaction=transaction_id, + metadata=client._rpc_metadata, + ) + except exceptions.ServiceUnavailable: + # Retry + pass + + current_sleep = _sleep(current_sleep) + + +def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER): + """Sleep and produce a new sleep time. + + .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\ + 2015/03/backoff.html + + Select a duration between zero and ``current_sleep``. It might seem + counterintuitive to have so much jitter, but + `Exponential Backoff And Jitter`_ argues that "full jitter" is + the best strategy. + + Args: + current_sleep (float): The current "max" for sleep interval. + max_sleep (Optional[float]): Eventual "max" sleep time + multiplier (Optional[float]): Multiplier for exponential backoff. + + Returns: + float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever + is smaller) + """ + actual_sleep = random.uniform(0.0, current_sleep) + time.sleep(actual_sleep) + return min(multiplier * current_sleep, max_sleep) diff --git a/firestore/google/cloud/firestore_v1/transforms.py b/firestore/google/cloud/firestore_v1/transforms.py new file mode 100644 index 000000000000..be3f40a5b422 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/transforms.py @@ -0,0 +1,90 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpful constants to use for Google Cloud Firestore.""" + + +class Sentinel(object): + """Sentinel objects used to signal special handling.""" + + __slots__ = ("description",) + + def __init__(self, description): + self.description = description + + def __repr__(self): + return "Sentinel: {}".format(self.description) + + +DELETE_FIELD = Sentinel("Value used to delete a field in a document.") + + +SERVER_TIMESTAMP = Sentinel( + "Value used to set a document field to the server timestamp." +) + + +class _ValueList(object): + """Read-only list of values. + + Args: + values (List | Tuple): values held in the helper. + """ + + slots = ("_values",) + + def __init__(self, values): + if not isinstance(values, (list, tuple)): + raise ValueError("'values' must be a list or tuple.") + + if len(values) == 0: + raise ValueError("'values' must be non-empty.") + + self._values = list(values) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return NotImplemented + return self._values == other._values + + @property + def values(self): + """Values to append. + + Returns (List): + values to be appended by the transform. + """ + return self._values + + +class ArrayUnion(_ValueList): + """Field transform: appends missing values to an array field. 
+ + See: + https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.append_missing_elements + + Args: + values (List | Tuple): values to append. + """ + + +class ArrayRemove(_ValueList): + """Field transform: remove values from an array field. + + See: + https://cloud.google.com/firestore/docs/reference/rpc/google.firestore.v1#google.firestore.v1.DocumentTransform.FieldTransform.FIELDS.google.firestore.v1.ArrayValue.google.firestore.v1.DocumentTransform.FieldTransform.remove_all_from_array + + Args: + values (List | Tuple): values to remove. + """ diff --git a/firestore/google/cloud/firestore_v1/types.py b/firestore/google/cloud/firestore_v1/types.py new file mode 100644 index 000000000000..c4e7c350783d --- /dev/null +++ b/firestore/google/cloud/firestore_v1/types.py @@ -0,0 +1,63 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.api import http_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.protobuf import wrappers_pb2 +from google.rpc import status_pb2 +from google.type import latlng_pb2 + +from google.api_core.protobuf_helpers import get_messages +from google.cloud.firestore_v1.proto import common_pb2 +from google.cloud.firestore_v1.proto import document_pb2 +from google.cloud.firestore_v1.proto import firestore_pb2 +from google.cloud.firestore_v1.proto import query_pb2 +from google.cloud.firestore_v1.proto import write_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + struct_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, + latlng_pb2, +] + +_local_modules = [common_pb2, document_pb2, firestore_pb2, query_pb2, write_pb2] + +names = [] + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.firestore_v1.types" + setattr(sys.modules[__name__], name, message) + names.append(name) + +__all__ = tuple(sorted(names)) diff --git a/firestore/google/cloud/firestore_v1/watch.py b/firestore/google/cloud/firestore_v1/watch.py new file mode 100644 index 000000000000..3e829c6b08b2 --- /dev/null +++ b/firestore/google/cloud/firestore_v1/watch.py @@ -0,0 +1,721 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import collections +import threading +import datetime +from enum import Enum +import functools + +import pytz + +from google.api_core.bidi import ResumableBidiRpc +from google.api_core.bidi import BackgroundConsumer +from google.cloud.firestore_v1.proto import firestore_pb2 +from google.cloud.firestore_v1 import _helpers + +from google.api_core import exceptions + +import grpc + +"""Python client for Google Cloud Firestore Watch.""" + +_LOGGER = logging.getLogger(__name__) + +WATCH_TARGET_ID = 0x5079 # "Py" + +GRPC_STATUS_CODE = { + "OK": 0, + "CANCELLED": 1, + "UNKNOWN": 2, + "INVALID_ARGUMENT": 3, + "DEADLINE_EXCEEDED": 4, + "NOT_FOUND": 5, + "ALREADY_EXISTS": 6, + "PERMISSION_DENIED": 7, + "UNAUTHENTICATED": 16, + "RESOURCE_EXHAUSTED": 8, + "FAILED_PRECONDITION": 9, + "ABORTED": 10, + "OUT_OF_RANGE": 11, + "UNIMPLEMENTED": 12, + "INTERNAL": 13, + "UNAVAILABLE": 14, + "DATA_LOSS": 15, + "DO_NOT_USE": -1, +} +_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" +_RETRYABLE_STREAM_ERRORS = ( + exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, + exceptions.InternalServerError, + exceptions.Unknown, + exceptions.GatewayTimeout, +) + +DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"]) + + +class WatchDocTree(object): + # TODO: Currently this uses a dict. Other implementations us an rbtree. + # The performance of this implementation should be investigated and may + # require modifying the underlying datastructure to a rbtree. + def __init__(self): + self._dict = {} + self._index = 0 + + def keys(self): + return list(self._dict.keys()) + + def _copy(self): + wdt = WatchDocTree() + wdt._dict = self._dict.copy() + wdt._index = self._index + self = wdt + return self + + def insert(self, key, value): + self = self._copy() + self._dict[key] = DocTreeEntry(value, self._index) + self._index += 1 + return self + + def find(self, key): + return self._dict[key] + + def remove(self, key): + self = self._copy() + del self._dict[key] + return self + + def __iter__(self): + for k in self._dict: + yield k + + def __len__(self): + return len(self._dict) + + def __contains__(self, k): + return k in self._dict + + +class ChangeType(Enum): + ADDED = 1 + REMOVED = 2 + MODIFIED = 3 + + +class DocumentChange(object): + def __init__(self, type, document, old_index, new_index): + """DocumentChange + + Args: + type (ChangeType): + document (document.DocumentSnapshot): + old_index (int): + new_index (int): + """ + # TODO: spec indicated an isEqual param also + self.type = type + self.document = document + self.old_index = old_index + self.new_index = new_index + + +class WatchResult(object): + def __init__(self, snapshot, name, change_type): + self.snapshot = snapshot + self.name = name + self.change_type = change_type + + +def _maybe_wrap_exception(exception): + """Wraps a gRPC exception class, if needed.""" + if isinstance(exception, grpc.RpcError): + return exceptions.from_grpc_error(exception) + return exception + + +def document_watch_comparator(doc1, doc2): + assert doc1 == doc2, "Document watches only support one document." 
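+    # A document watch targets exactly one document, so both snapshots seen
+    # here refer to the same document and the comparator reports equality.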
+ return 0 + + +class Watch(object): + + BackgroundConsumer = BackgroundConsumer # FBO unit tests + ResumableBidiRpc = ResumableBidiRpc # FBO unit tests + + def __init__( + self, + document_reference, + firestore, + target, + comparator, + snapshot_callback, + document_snapshot_cls, + document_reference_cls, + BackgroundConsumer=None, # FBO unit testing + ResumableBidiRpc=None, # FBO unit testing + ): + """ + Args: + firestore: + target: + comparator: + snapshot_callback: Callback method to process snapshots. + Args: + docs (List(DocumentSnapshot)): A callback that returns the + ordered list of documents stored in this snapshot. + changes (List(str)): A callback that returns the list of + changed documents since the last snapshot delivered for + this watch. + read_time (string): The ISO 8601 time at which this + snapshot was obtained. + + document_snapshot_cls: instance of DocumentSnapshot + document_reference_cls: instance of DocumentReference + """ + self._document_reference = document_reference + self._firestore = firestore + self._api = firestore._firestore_api + self._targets = target + self._comparator = comparator + self.DocumentSnapshot = document_snapshot_cls + self.DocumentReference = document_reference_cls + self._snapshot_callback = snapshot_callback + self._closing = threading.Lock() + self._closed = False + + def should_recover(exc): # pragma: NO COVER + return ( + isinstance(exc, grpc.RpcError) + and exc.code() == grpc.StatusCode.UNAVAILABLE + ) + + initial_request = firestore_pb2.ListenRequest( + database=self._firestore._database_string, add_target=self._targets + ) + + if ResumableBidiRpc is None: + ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests + + self._rpc = ResumableBidiRpc( + self._api.transport._stubs["firestore_stub"].Listen, + initial_request=initial_request, + should_recover=should_recover, + ) + + self._rpc.add_done_callback(self._on_rpc_done) + + # Initialize state for on_snapshot + # The sorted tree of QueryDocumentSnapshots as sent in the last + # snapshot. We only look at the keys. + self.doc_tree = WatchDocTree() + + # A map of document names to QueryDocumentSnapshots for the last sent + # snapshot. + self.doc_map = {} + + # The accumulates map of document changes (keyed by document name) for + # the current snapshot. + self.change_map = {} + + # The current state of the query results. + self.current = False + + # We need this to track whether we've pushed an initial set of changes, + # since we should push those even when there are no changes, if there + # aren't docs. + self.has_pushed = False + + # The server assigns and updates the resume token. + self.resume_token = None + if BackgroundConsumer is None: # FBO unit tests + BackgroundConsumer = self.BackgroundConsumer + + self._consumer = BackgroundConsumer(self._rpc, self.on_snapshot) + self._consumer.start() + + @property + def is_active(self): + """bool: True if this manager is actively streaming. + + Note that ``False`` does not indicate this is complete shut down, + just that it stopped getting new messages. + """ + return self._consumer is not None and self._consumer.is_active + + def close(self, reason=None): + """Stop consuming messages and shutdown all helper threads. + + This method is idempotent. Additional calls will have no effect. + + Args: + reason (Any): The reason to close this. If None, this is considered + an "intentional" shutdown. + """ + with self._closing: + if self._closed: + return + + # Stop consuming messages. 
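+            # Shut down the background consumer before closing the underlying
+            # bidi RPC; ``self._closed`` guards against running this twice.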
+ if self.is_active: + _LOGGER.debug("Stopping consumer.") + self._consumer.stop() + self._consumer = None + + self._rpc.close() + self._rpc = None + self._closed = True + _LOGGER.debug("Finished stopping manager.") + + if reason: + # Raise an exception if a reason is provided + _LOGGER.debug("reason for closing: %s" % reason) + if isinstance(reason, Exception): + raise reason + raise RuntimeError(reason) + + def _on_rpc_done(self, future): + """Triggered whenever the underlying RPC terminates without recovery. + + This is typically triggered from one of two threads: the background + consumer thread (when calling ``recv()`` produces a non-recoverable + error) or the grpc management thread (when cancelling the RPC). + + This method is *non-blocking*. It will start another thread to deal + with shutting everything down. This is to prevent blocking in the + background consumer and preventing it from being ``joined()``. + """ + _LOGGER.info("RPC termination has signaled manager shutdown.") + future = _maybe_wrap_exception(future) + thread = threading.Thread( + name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} + ) + thread.daemon = True + thread.start() + + def unsubscribe(self): + self.close() + + @classmethod + def for_document( + cls, + document_ref, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ): + """ + Creates a watch snapshot listener for a document. snapshot_callback + receives a DocumentChange object, but may also start to get + targetChange and such soon + + Args: + document_ref: Reference to Document + snapshot_callback: callback to be called on snapshot + snapshot_class_instance: instance of DocumentSnapshot to make + snapshots with to pass to snapshot_callback + reference_class_instance: instance of DocumentReference to make + references + + """ + return cls( + document_ref, + document_ref._client, + { + "documents": {"documents": [document_ref._document_path]}, + "target_id": WATCH_TARGET_ID, + }, + document_watch_comparator, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ) + + @classmethod + def for_query( + cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance + ): + query_target = firestore_pb2.Target.QueryTarget( + parent=query._client._database_string, structured_query=query._to_protobuf() + ) + + return cls( + query, + query._client, + {"query": query_target, "target_id": WATCH_TARGET_ID}, + query._comparator, + snapshot_callback, + snapshot_class_instance, + reference_class_instance, + ) + + def _on_snapshot_target_change_no_change(self, proto): + _LOGGER.debug("on_snapshot: target change: NO_CHANGE") + change = proto.target_change + + no_target_ids = change.target_ids is None or len(change.target_ids) == 0 + if no_target_ids and change.read_time and self.current: + # TargetChange.CURRENT followed by TargetChange.NO_CHANGE + # signals a consistent state. Invoke the onSnapshot + # callback as specified by the user. 
+ self.push(change.read_time, change.resume_token) + + def _on_snapshot_target_change_add(self, proto): + _LOGGER.debug("on_snapshot: target change: ADD") + target_id = proto.target_change.target_ids[0] + if target_id != WATCH_TARGET_ID: + raise RuntimeError("Unexpected target ID %s sent by server" % target_id) + + def _on_snapshot_target_change_remove(self, proto): + _LOGGER.debug("on_snapshot: target change: REMOVE") + change = proto.target_change + + code = 13 + message = "internal error" + if change.cause: + code = change.cause.code + message = change.cause.message + + message = "Error %s: %s" % (code, message) + + raise RuntimeError(message) + + def _on_snapshot_target_change_reset(self, proto): + # Whatever changes have happened so far no longer matter. + _LOGGER.debug("on_snapshot: target change: RESET") + self._reset_docs() + + def _on_snapshot_target_change_current(self, proto): + _LOGGER.debug("on_snapshot: target change: CURRENT") + self.current = True + + def on_snapshot(self, proto): + """ + Called everytime there is a response from listen. Collect changes + and 'push' the changes in a batch to the customer when we receive + 'current' from the listen response. + + Args: + listen_response(`google.cloud.firestore_v1.types.ListenResponse`): + Callback method that receives a object to + """ + TargetChange = firestore_pb2.TargetChange + + target_changetype_dispatch = { + TargetChange.NO_CHANGE: self._on_snapshot_target_change_no_change, + TargetChange.ADD: self._on_snapshot_target_change_add, + TargetChange.REMOVE: self._on_snapshot_target_change_remove, + TargetChange.RESET: self._on_snapshot_target_change_reset, + TargetChange.CURRENT: self._on_snapshot_target_change_current, + } + + target_change = proto.target_change + if str(target_change): + target_change_type = target_change.target_change_type + _LOGGER.debug("on_snapshot: target change: " + str(target_change_type)) + meth = target_changetype_dispatch.get(target_change_type) + if meth is None: + _LOGGER.info( + "on_snapshot: Unknown target change " + str(target_change_type) + ) + self.close( + reason="Unknown target change type: %s " % str(target_change_type) + ) + else: + try: + meth(proto) + except Exception as exc2: + _LOGGER.debug("meth(proto) exc: " + str(exc2)) + raise + + # NOTE: + # in other implementations, such as node, the backoff is reset here + # in this version bidi rpc is just used and will control this. + + elif str(proto.document_change): + _LOGGER.debug("on_snapshot: document change") + + # No other target_ids can show up here, but we still need to see + # if the targetId was in the added list or removed list. + target_ids = proto.document_change.target_ids or [] + removed_target_ids = proto.document_change.removed_target_ids or [] + changed = False + removed = False + + if WATCH_TARGET_ID in target_ids: + changed = True + + if WATCH_TARGET_ID in removed_target_ids: + removed = True + + if changed: + _LOGGER.debug("on_snapshot: document change: CHANGED") + + # google.cloud.firestore_v1.types.DocumentChange + document_change = proto.document_change + # google.cloud.firestore_v1.types.Document + document = document_change.document + + data = _helpers.decode_dict(document.fields, self._firestore) + + # Create a snapshot. 
As Document and Query objects can be + # passed we need to get a Document Reference in a more manual + # fashion than self._document_reference + document_name = document.name + db_str = self._firestore._database_string + db_str_documents = db_str + "/documents/" + if document_name.startswith(db_str_documents): + document_name = document_name[len(db_str_documents) :] + + document_ref = self._firestore.document(document_name) + + snapshot = self.DocumentSnapshot( + reference=document_ref, + data=data, + exists=True, + read_time=None, + create_time=document.create_time, + update_time=document.update_time, + ) + self.change_map[document.name] = snapshot + + elif removed: + _LOGGER.debug("on_snapshot: document change: REMOVED") + document = proto.document_change.document + self.change_map[document.name] = ChangeType.REMOVED + + # NB: document_delete and document_remove (as far as we, the client, + # are concerned) are functionally equivalent + + elif str(proto.document_delete): + _LOGGER.debug("on_snapshot: document change: DELETE") + name = proto.document_delete.document + self.change_map[name] = ChangeType.REMOVED + + elif str(proto.document_remove): + _LOGGER.debug("on_snapshot: document change: REMOVE") + name = proto.document_remove.document + self.change_map[name] = ChangeType.REMOVED + + elif proto.filter: + _LOGGER.debug("on_snapshot: filter update") + if proto.filter.count != self._current_size(): + # We need to remove all the current results. + self._reset_docs() + # The filter didn't match, so re-issue the query. + # TODO: reset stream method? + # self._reset_stream(); + + else: + _LOGGER.debug("UNKNOWN TYPE. UHOH") + self.close(reason=ValueError("Unknown listen response type: %s" % proto)) + + def push(self, read_time, next_resume_token): + """ + Assembles a new snapshot from the current set of changes and invokes + the user's callback. Clears the current changes on completion. + """ + deletes, adds, updates = Watch._extract_changes( + self.doc_map, self.change_map, read_time + ) + + updated_tree, updated_map, appliedChanges = self._compute_snapshot( + self.doc_tree, self.doc_map, deletes, adds, updates + ) + + if not self.has_pushed or len(appliedChanges): + # TODO: It is possible in the future we will have the tree order + # on insert. For now, we sort here. + key = functools.cmp_to_key(self._comparator) + keys = sorted(updated_tree.keys(), key=key) + + self._snapshot_callback( + keys, + appliedChanges, + datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc), + ) + self.has_pushed = True + + self.doc_tree = updated_tree + self.doc_map = updated_map + self.change_map.clear() + self.resume_token = next_resume_token + + @staticmethod + def _extract_changes(doc_map, changes, read_time): + deletes = [] + adds = [] + updates = [] + + for name, value in changes.items(): + if value == ChangeType.REMOVED: + if name in doc_map: + deletes.append(name) + elif name in doc_map: + if read_time is not None: + value.read_time = read_time + updates.append(value) + else: + if read_time is not None: + value.read_time = read_time + adds.append(value) + + return (deletes, adds, updates) + + def _compute_snapshot( + self, doc_tree, doc_map, delete_changes, add_changes, update_changes + ): + updated_tree = doc_tree + updated_map = doc_map + + assert len(doc_tree) == len(doc_map), ( + "The document tree and document map should have the same " + + "number of entries." + ) + + def delete_doc(name, updated_tree, updated_map): + """ + Applies a document delete to the document tree and document map. 
+ Returns the corresponding DocumentChange event. + """ + assert name in updated_map, "Document to delete does not exist" + old_document = updated_map.get(name) + # TODO: If a document doesn't exist this raises IndexError. Handle? + existing = updated_tree.find(old_document) + old_index = existing.index + updated_tree = updated_tree.remove(old_document) + del updated_map[name] + return ( + DocumentChange(ChangeType.REMOVED, old_document, old_index, -1), + updated_tree, + updated_map, + ) + + def add_doc(new_document, updated_tree, updated_map): + """ + Applies a document add to the document tree and the document map. + Returns the corresponding DocumentChange event. + """ + name = new_document.reference._document_path + assert name not in updated_map, "Document to add already exists" + updated_tree = updated_tree.insert(new_document, None) + new_index = updated_tree.find(new_document).index + updated_map[name] = new_document + return ( + DocumentChange(ChangeType.ADDED, new_document, -1, new_index), + updated_tree, + updated_map, + ) + + def modify_doc(new_document, updated_tree, updated_map): + """ + Applies a document modification to the document tree and the + document map. + Returns the DocumentChange event for successful modifications. + """ + name = new_document.reference._document_path + assert name in updated_map, "Document to modify does not exist" + old_document = updated_map.get(name) + if old_document.update_time != new_document.update_time: + remove_change, updated_tree, updated_map = delete_doc( + name, updated_tree, updated_map + ) + add_change, updated_tree, updated_map = add_doc( + new_document, updated_tree, updated_map + ) + return ( + DocumentChange( + ChangeType.MODIFIED, + new_document, + remove_change.old_index, + add_change.new_index, + ), + updated_tree, + updated_map, + ) + + return None, updated_tree, updated_map + + # Process the sorted changes in the order that is expected by our + # clients (removals, additions, and then modifications). We also need + # to sort the individual changes to assure that old_index/new_index + # keep incrementing. + appliedChanges = [] + + key = functools.cmp_to_key(self._comparator) + + # Deletes are sorted based on the order of the existing document. + delete_changes = sorted(delete_changes, key=key) + for name in delete_changes: + change, updated_tree, updated_map = delete_doc( + name, updated_tree, updated_map + ) + appliedChanges.append(change) + + add_changes = sorted(add_changes, key=key) + _LOGGER.debug("walk over add_changes") + for snapshot in add_changes: + _LOGGER.debug("in add_changes") + change, updated_tree, updated_map = add_doc( + snapshot, updated_tree, updated_map + ) + appliedChanges.append(change) + + update_changes = sorted(update_changes, key=key) + for snapshot in update_changes: + change, updated_tree, updated_map = modify_doc( + snapshot, updated_tree, updated_map + ) + if change is not None: + appliedChanges.append(change) + + assert len(updated_tree) == len(updated_map), ( + "The update document " + + "tree and document map should have the same number of entries." + ) + return (updated_tree, updated_map, appliedChanges) + + def _affects_target(self, target_ids, current_id): + if target_ids is None: + return True + + return current_id in target_ids + + def _current_size(self): + """ + Returns the current count of all documents, including the changes from + the current changeMap. 
+ """ + deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None) + return len(self.doc_map) + len(adds) - len(deletes) + + def _reset_docs(self): + """ + Helper to clear the docs on RESET or filter mismatch. + """ + _LOGGER.debug("resetting documents") + self.change_map.clear() + self.resume_token = None + + # Mark each document as deleted. If documents are not deleted + # they will be sent again by the server. + for snapshot in self.doc_tree.keys(): + name = snapshot.reference._document_path + self.change_map[name] = ChangeType.REMOVED + + self.current = False diff --git a/firestore/tests/unit/v1/__init__.py b/firestore/tests/unit/v1/__init__.py new file mode 100644 index 000000000000..ab6729095248 --- /dev/null +++ b/firestore/tests/unit/v1/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/firestore/tests/unit/v1/test__helpers.py b/firestore/tests/unit/v1/test__helpers.py new file mode 100644 index 000000000000..3f54b6751571 --- /dev/null +++ b/firestore/tests/unit/v1/test__helpers.py @@ -0,0 +1,2089 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
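The tests below exercise the value codec in google.cloud.firestore_v1._helpers. As a minimal, illustrative sketch of the round trip they verify (not taken from the patch itself; the field names and values are arbitrary example data, and only the encode_value/decode_value helpers imported by these tests are assumed):

    # Illustrative only -- not part of the patch.
    from google.cloud.firestore_v1._helpers import decode_value, encode_value

    # encode_value wraps native Python data in a document_pb2.Value;
    # decode_value reverses it.  The client argument is only consulted
    # for reference values, so None suffices for plain data like this.
    value_pb = encode_value({"rating": 4.5, "tags": [u"a", u"b"]})
    assert decode_value(value_pb, None) == {"rating": 4.5, "tags": [u"a", u"b"]}
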
+ +import datetime +import sys +import unittest + +import mock + + +class TestGeoPoint(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1._helpers import GeoPoint + + return GeoPoint + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + lat = 81.25 + lng = 359.984375 + geo_pt = self._make_one(lat, lng) + self.assertEqual(geo_pt.latitude, lat) + self.assertEqual(geo_pt.longitude, lng) + + def test_to_protobuf(self): + from google.type import latlng_pb2 + + lat = 0.015625 + lng = 20.03125 + geo_pt = self._make_one(lat, lng) + result = geo_pt.to_protobuf() + geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) + self.assertEqual(result, geo_pt_pb) + + def test___eq__(self): + lat = 0.015625 + lng = 20.03125 + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = self._make_one(lat, lng) + self.assertEqual(geo_pt1, geo_pt2) + + def test___eq__type_differ(self): + lat = 0.015625 + lng = 20.03125 + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = object() + self.assertNotEqual(geo_pt1, geo_pt2) + self.assertIs(geo_pt1.__eq__(geo_pt2), NotImplemented) + + def test___ne__same_value(self): + lat = 0.015625 + lng = 20.03125 + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = self._make_one(lat, lng) + comparison_val = geo_pt1 != geo_pt2 + self.assertFalse(comparison_val) + + def test___ne__(self): + geo_pt1 = self._make_one(0.0, 1.0) + geo_pt2 = self._make_one(2.0, 3.0) + self.assertNotEqual(geo_pt1, geo_pt2) + + def test___ne__type_differ(self): + lat = 0.015625 + lng = 20.03125 + geo_pt1 = self._make_one(lat, lng) + geo_pt2 = object() + self.assertNotEqual(geo_pt1, geo_pt2) + self.assertIs(geo_pt1.__ne__(geo_pt2), NotImplemented) + + +class Test_verify_path(unittest.TestCase): + @staticmethod + def _call_fut(path, is_collection): + from google.cloud.firestore_v1._helpers import verify_path + + return verify_path(path, is_collection) + + def test_empty(self): + path = () + with self.assertRaises(ValueError): + self._call_fut(path, True) + with self.assertRaises(ValueError): + self._call_fut(path, False) + + def test_wrong_length_collection(self): + path = ("foo", "bar") + with self.assertRaises(ValueError): + self._call_fut(path, True) + + def test_wrong_length_document(self): + path = ("Kind",) + with self.assertRaises(ValueError): + self._call_fut(path, False) + + def test_wrong_type_collection(self): + path = (99, "ninety-nine", "zap") + with self.assertRaises(ValueError): + self._call_fut(path, True) + + def test_wrong_type_document(self): + path = ("Users", "Ada", "Candy", {}) + with self.assertRaises(ValueError): + self._call_fut(path, False) + + def test_success_collection(self): + path = ("Computer", "Magic", "Win") + ret_val = self._call_fut(path, True) + # NOTE: We are just checking that it didn't fail. + self.assertIsNone(ret_val) + + def test_success_document(self): + path = ("Tokenizer", "Seventeen", "Cheese", "Burger") + ret_val = self._call_fut(path, False) + # NOTE: We are just checking that it didn't fail. 
+ self.assertIsNone(ret_val) + + +class Test_encode_value(unittest.TestCase): + @staticmethod + def _call_fut(value): + from google.cloud.firestore_v1._helpers import encode_value + + return encode_value(value) + + def test_none(self): + from google.protobuf import struct_pb2 + + result = self._call_fut(None) + expected = _value_pb(null_value=struct_pb2.NULL_VALUE) + self.assertEqual(result, expected) + + def test_boolean(self): + result = self._call_fut(True) + expected = _value_pb(boolean_value=True) + self.assertEqual(result, expected) + + def test_integer(self): + value = 425178 + result = self._call_fut(value) + expected = _value_pb(integer_value=value) + self.assertEqual(result, expected) + + def test_float(self): + value = 123.4453125 + result = self._call_fut(value) + expected = _value_pb(double_value=value) + self.assertEqual(result, expected) + + def test_datetime_with_nanos(self): + from google.api_core.datetime_helpers import DatetimeWithNanoseconds + from google.protobuf import timestamp_pb2 + + dt_seconds = 1488768504 + dt_nanos = 458816991 + timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) + dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb) + + result = self._call_fut(dt_val) + expected = _value_pb(timestamp_value=timestamp_pb) + self.assertEqual(result, expected) + + def test_datetime_wo_nanos(self): + from google.protobuf import timestamp_pb2 + + dt_seconds = 1488768504 + dt_nanos = 458816000 + # Make sure precision is valid in microseconds too. + self.assertEqual(dt_nanos % 1000, 0) + dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) + + result = self._call_fut(dt_val) + timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) + expected = _value_pb(timestamp_value=timestamp_pb) + self.assertEqual(result, expected) + + def test_string(self): + value = u"\u2018left quote, right quote\u2019" + result = self._call_fut(value) + expected = _value_pb(string_value=value) + self.assertEqual(result, expected) + + def test_bytes(self): + value = b"\xe3\xf2\xff\x00" + result = self._call_fut(value) + expected = _value_pb(bytes_value=value) + self.assertEqual(result, expected) + + def test_reference_value(self): + client = _make_client() + + value = client.document("my", "friend") + result = self._call_fut(value) + expected = _value_pb(reference_value=value._document_path) + self.assertEqual(result, expected) + + def test_geo_point(self): + from google.cloud.firestore_v1._helpers import GeoPoint + + value = GeoPoint(50.5, 88.75) + result = self._call_fut(value) + expected = _value_pb(geo_point_value=value.to_protobuf()) + self.assertEqual(result, expected) + + def test_array(self): + from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue + + result = self._call_fut([99, True, 118.5]) + + array_pb = ArrayValue( + values=[ + _value_pb(integer_value=99), + _value_pb(boolean_value=True), + _value_pb(double_value=118.5), + ] + ) + expected = _value_pb(array_value=array_pb) + self.assertEqual(result, expected) + + def test_map(self): + from google.cloud.firestore_v1.proto.document_pb2 import MapValue + + result = self._call_fut({"abc": 285, "def": b"piglatin"}) + + map_pb = MapValue( + fields={ + "abc": _value_pb(integer_value=285), + "def": _value_pb(bytes_value=b"piglatin"), + } + ) + expected = _value_pb(map_value=map_pb) + self.assertEqual(result, expected) + + def test_bad_type(self): + value = object() + with self.assertRaises(TypeError): + self._call_fut(value) + + +class 
Test_encode_dict(unittest.TestCase): + @staticmethod + def _call_fut(values_dict): + from google.cloud.firestore_v1._helpers import encode_dict + + return encode_dict(values_dict) + + def test_many_types(self): + from google.protobuf import struct_pb2 + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue + from google.cloud.firestore_v1.proto.document_pb2 import MapValue + + dt_seconds = 1497397225 + dt_nanos = 465964000 + # Make sure precision is valid in microseconds too. + self.assertEqual(dt_nanos % 1000, 0) + dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) + + client = _make_client() + document = client.document("most", "adjective", "thing", "here") + + values_dict = { + "foo": None, + "bar": True, + "baz": 981, + "quux": 2.875, + "quuz": dt_val, + "corge": u"\N{snowman}", + "grault": b"\xe2\x98\x83", + "wibble": document, + "garply": [u"fork", 4.0], + "waldo": {"fred": u"zap", "thud": False}, + } + encoded_dict = self._call_fut(values_dict) + expected_dict = { + "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), + "bar": _value_pb(boolean_value=True), + "baz": _value_pb(integer_value=981), + "quux": _value_pb(double_value=2.875), + "quuz": _value_pb( + timestamp_value=timestamp_pb2.Timestamp( + seconds=dt_seconds, nanos=dt_nanos + ) + ), + "corge": _value_pb(string_value=u"\N{snowman}"), + "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), + "wibble": _value_pb(reference_value=document._document_path), + "garply": _value_pb( + array_value=ArrayValue( + values=[ + _value_pb(string_value=u"fork"), + _value_pb(double_value=4.0), + ] + ) + ), + "waldo": _value_pb( + map_value=MapValue( + fields={ + "fred": _value_pb(string_value=u"zap"), + "thud": _value_pb(boolean_value=False), + } + ) + ), + } + self.assertEqual(encoded_dict, expected_dict) + + +class Test_reference_value_to_document(unittest.TestCase): + @staticmethod + def _call_fut(reference_value, client): + from google.cloud.firestore_v1._helpers import reference_value_to_document + + return reference_value_to_document(reference_value, client) + + def test_bad_format(self): + from google.cloud.firestore_v1._helpers import BAD_REFERENCE_ERROR + + reference_value = "not/the/right/format" + with self.assertRaises(ValueError) as exc_info: + self._call_fut(reference_value, None) + + err_msg = BAD_REFERENCE_ERROR.format(reference_value) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + def test_same_client(self): + from google.cloud.firestore_v1.document import DocumentReference + + client = _make_client() + document = client.document("that", "this") + reference_value = document._document_path + + new_document = self._call_fut(reference_value, client) + self.assertIsNot(new_document, document) + + self.assertIsInstance(new_document, DocumentReference) + self.assertIs(new_document._client, client) + self.assertEqual(new_document._path, document._path) + + def test_different_client(self): + from google.cloud.firestore_v1._helpers import WRONG_APP_REFERENCE + + client1 = _make_client(project="kirk") + document = client1.document("tin", "foil") + reference_value = document._document_path + + client2 = _make_client(project="spock") + with self.assertRaises(ValueError) as exc_info: + self._call_fut(reference_value, client2) + + err_msg = WRONG_APP_REFERENCE.format(reference_value, client2._database_string) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + +class Test_decode_value(unittest.TestCase): + @staticmethod + def _call_fut(value, 
client=mock.sentinel.client): + from google.cloud.firestore_v1._helpers import decode_value + + return decode_value(value, client) + + def test_none(self): + from google.protobuf import struct_pb2 + + value = _value_pb(null_value=struct_pb2.NULL_VALUE) + self.assertIsNone(self._call_fut(value)) + + def test_bool(self): + value1 = _value_pb(boolean_value=True) + self.assertTrue(self._call_fut(value1)) + value2 = _value_pb(boolean_value=False) + self.assertFalse(self._call_fut(value2)) + + def test_int(self): + int_val = 29871 + value = _value_pb(integer_value=int_val) + self.assertEqual(self._call_fut(value), int_val) + + def test_float(self): + float_val = 85.9296875 + value = _value_pb(double_value=float_val) + self.assertEqual(self._call_fut(value), float_val) + + @unittest.skipIf( + (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" + ) + def test_datetime(self): + from google.api_core.datetime_helpers import DatetimeWithNanoseconds + from google.protobuf import timestamp_pb2 + + dt_seconds = 552855006 + dt_nanos = 766961828 + + timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) + value = _value_pb(timestamp_value=timestamp_pb) + + expected_dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb) + self.assertEqual(self._call_fut(value), expected_dt_val) + + def test_unicode(self): + unicode_val = u"zorgon" + value = _value_pb(string_value=unicode_val) + self.assertEqual(self._call_fut(value), unicode_val) + + def test_bytes(self): + bytes_val = b"abc\x80" + value = _value_pb(bytes_value=bytes_val) + self.assertEqual(self._call_fut(value), bytes_val) + + def test_reference(self): + from google.cloud.firestore_v1.document import DocumentReference + + client = _make_client() + path = (u"then", u"there-was-one") + document = client.document(*path) + ref_string = document._document_path + value = _value_pb(reference_value=ref_string) + + result = self._call_fut(value, client) + self.assertIsInstance(result, DocumentReference) + self.assertIs(result._client, client) + self.assertEqual(result._path, path) + + def test_geo_point(self): + from google.cloud.firestore_v1._helpers import GeoPoint + + geo_pt = GeoPoint(latitude=42.5, longitude=99.0625) + value = _value_pb(geo_point_value=geo_pt.to_protobuf()) + self.assertEqual(self._call_fut(value), geo_pt) + + def test_array(self): + from google.cloud.firestore_v1.proto import document_pb2 + + sub_value1 = _value_pb(boolean_value=True) + sub_value2 = _value_pb(double_value=14.1396484375) + sub_value3 = _value_pb(bytes_value=b"\xde\xad\xbe\xef") + array_pb = document_pb2.ArrayValue(values=[sub_value1, sub_value2, sub_value3]) + value = _value_pb(array_value=array_pb) + + expected = [ + sub_value1.boolean_value, + sub_value2.double_value, + sub_value3.bytes_value, + ] + self.assertEqual(self._call_fut(value), expected) + + def test_map(self): + from google.cloud.firestore_v1.proto import document_pb2 + + sub_value1 = _value_pb(integer_value=187680) + sub_value2 = _value_pb(string_value=u"how low can you go?") + map_pb = document_pb2.MapValue( + fields={"first": sub_value1, "second": sub_value2} + ) + value = _value_pb(map_value=map_pb) + + expected = { + "first": sub_value1.integer_value, + "second": sub_value2.string_value, + } + self.assertEqual(self._call_fut(value), expected) + + def test_nested_map(self): + from google.cloud.firestore_v1.proto import document_pb2 + + actual_value1 = 1009876 + actual_value2 = u"hey you guys" + actual_value3 = 90.875 + map_pb1 = document_pb2.MapValue( 
+ fields={ + "lowest": _value_pb(integer_value=actual_value1), + "aside": _value_pb(string_value=actual_value2), + } + ) + map_pb2 = document_pb2.MapValue( + fields={ + "middle": _value_pb(map_value=map_pb1), + "aside": _value_pb(boolean_value=True), + } + ) + map_pb3 = document_pb2.MapValue( + fields={ + "highest": _value_pb(map_value=map_pb2), + "aside": _value_pb(double_value=actual_value3), + } + ) + value = _value_pb(map_value=map_pb3) + + expected = { + "highest": { + "middle": {"lowest": actual_value1, "aside": actual_value2}, + "aside": True, + }, + "aside": actual_value3, + } + self.assertEqual(self._call_fut(value), expected) + + def test_unset_value_type(self): + with self.assertRaises(ValueError): + self._call_fut(_value_pb()) + + def test_unknown_value_type(self): + value_pb = mock.Mock(spec=["WhichOneof"]) + value_pb.WhichOneof.return_value = "zoob_value" + + with self.assertRaises(ValueError): + self._call_fut(value_pb) + + value_pb.WhichOneof.assert_called_once_with("value_type") + + +class Test_decode_dict(unittest.TestCase): + @staticmethod + def _call_fut(value_fields, client=mock.sentinel.client): + from google.cloud.firestore_v1._helpers import decode_dict + + return decode_dict(value_fields, client) + + @unittest.skipIf( + (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" + ) + def test_many_types(self): + from google.protobuf import struct_pb2 + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto.document_pb2 import ArrayValue + from google.cloud.firestore_v1.proto.document_pb2 import MapValue + from google.cloud._helpers import UTC + from google.cloud.firestore_v1.field_path import FieldPath + + dt_seconds = 1394037350 + dt_nanos = 667285000 + # Make sure precision is valid in microseconds too. 
+ self.assertEqual(dt_nanos % 1000, 0) + dt_val = datetime.datetime.utcfromtimestamp( + dt_seconds + 1e-9 * dt_nanos + ).replace(tzinfo=UTC) + + value_fields = { + "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), + "bar": _value_pb(boolean_value=True), + "baz": _value_pb(integer_value=981), + "quux": _value_pb(double_value=2.875), + "quuz": _value_pb( + timestamp_value=timestamp_pb2.Timestamp( + seconds=dt_seconds, nanos=dt_nanos + ) + ), + "corge": _value_pb(string_value=u"\N{snowman}"), + "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), + "garply": _value_pb( + array_value=ArrayValue( + values=[ + _value_pb(string_value=u"fork"), + _value_pb(double_value=4.0), + ] + ) + ), + "waldo": _value_pb( + map_value=MapValue( + fields={ + "fred": _value_pb(string_value=u"zap"), + "thud": _value_pb(boolean_value=False), + } + ) + ), + FieldPath("a", "b", "c").to_api_repr(): _value_pb(boolean_value=False), + } + expected = { + "foo": None, + "bar": True, + "baz": 981, + "quux": 2.875, + "quuz": dt_val, + "corge": u"\N{snowman}", + "grault": b"\xe2\x98\x83", + "garply": [u"fork", 4.0], + "waldo": {"fred": u"zap", "thud": False}, + "a.b.c": False, + } + self.assertEqual(self._call_fut(value_fields), expected) + + +class Test_get_doc_id(unittest.TestCase): + @staticmethod + def _call_fut(document_pb, expected_prefix): + from google.cloud.firestore_v1._helpers import get_doc_id + + return get_doc_id(document_pb, expected_prefix) + + @staticmethod + def _dummy_ref_string(collection_id): + from google.cloud.firestore_v1.client import DEFAULT_DATABASE + + project = u"bazzzz" + return u"projects/{}/databases/{}/documents/{}".format( + project, DEFAULT_DATABASE, collection_id + ) + + def test_success(self): + from google.cloud.firestore_v1.proto import document_pb2 + + prefix = self._dummy_ref_string("sub-collection") + actual_id = "this-is-the-one" + name = "{}/{}".format(prefix, actual_id) + + document_pb = document_pb2.Document(name=name) + document_id = self._call_fut(document_pb, prefix) + self.assertEqual(document_id, actual_id) + + def test_failure(self): + from google.cloud.firestore_v1.proto import document_pb2 + + actual_prefix = self._dummy_ref_string("the-right-one") + wrong_prefix = self._dummy_ref_string("the-wrong-one") + name = "{}/{}".format(actual_prefix, "sorry-wont-works") + + document_pb = document_pb2.Document(name=name) + with self.assertRaises(ValueError) as exc_info: + self._call_fut(document_pb, wrong_prefix) + + exc_args = exc_info.exception.args + self.assertEqual(len(exc_args), 4) + self.assertEqual(exc_args[1], name) + self.assertEqual(exc_args[3], wrong_prefix) + + +class Test_extract_fields(unittest.TestCase): + @staticmethod + def _call_fut(document_data, prefix_path, expand_dots=False): + from google.cloud.firestore_v1 import _helpers + + return _helpers.extract_fields( + document_data, prefix_path, expand_dots=expand_dots + ) + + def test_w_empty_document(self): + from google.cloud.firestore_v1._helpers import _EmptyDict + + document_data = {} + prefix_path = _make_field_path() + expected = [(_make_field_path(), _EmptyDict)] + + iterator = self._call_fut(document_data, prefix_path) + self.assertEqual(list(iterator), expected) + + def test_w_invalid_key_and_expand_dots(self): + document_data = {"b": 1, "a~d": 2, "c": 3} + prefix_path = _make_field_path() + + with self.assertRaises(ValueError): + list(self._call_fut(document_data, prefix_path, expand_dots=True)) + + def test_w_shallow_keys(self): + document_data = {"b": 1, "a": 2, "c": 3} + prefix_path = 
_make_field_path() + expected = [ + (_make_field_path("a"), 2), + (_make_field_path("b"), 1), + (_make_field_path("c"), 3), + ] + + iterator = self._call_fut(document_data, prefix_path) + self.assertEqual(list(iterator), expected) + + def test_w_nested(self): + from google.cloud.firestore_v1._helpers import _EmptyDict + + document_data = {"b": {"a": {"d": 4, "c": 3, "g": {}}, "e": 7}, "f": 5} + prefix_path = _make_field_path() + expected = [ + (_make_field_path("b", "a", "c"), 3), + (_make_field_path("b", "a", "d"), 4), + (_make_field_path("b", "a", "g"), _EmptyDict), + (_make_field_path("b", "e"), 7), + (_make_field_path("f"), 5), + ] + + iterator = self._call_fut(document_data, prefix_path) + self.assertEqual(list(iterator), expected) + + def test_w_expand_dotted(self): + from google.cloud.firestore_v1._helpers import _EmptyDict + + document_data = { + "b": {"a": {"d": 4, "c": 3, "g": {}, "k.l.m": 17}, "e": 7}, + "f": 5, + "h.i.j": 9, + } + prefix_path = _make_field_path() + expected = [ + (_make_field_path("b", "a", "c"), 3), + (_make_field_path("b", "a", "d"), 4), + (_make_field_path("b", "a", "g"), _EmptyDict), + (_make_field_path("b", "a", "k.l.m"), 17), + (_make_field_path("b", "e"), 7), + (_make_field_path("f"), 5), + (_make_field_path("h", "i", "j"), 9), + ] + + iterator = self._call_fut(document_data, prefix_path, expand_dots=True) + self.assertEqual(list(iterator), expected) + + +class Test_set_field_value(unittest.TestCase): + @staticmethod + def _call_fut(document_data, field_path, value): + from google.cloud.firestore_v1 import _helpers + + return _helpers.set_field_value(document_data, field_path, value) + + def test_normal_value_w_shallow(self): + document = {} + field_path = _make_field_path("a") + value = 3 + + self._call_fut(document, field_path, value) + + self.assertEqual(document, {"a": 3}) + + def test_normal_value_w_nested(self): + document = {} + field_path = _make_field_path("a", "b", "c") + value = 3 + + self._call_fut(document, field_path, value) + + self.assertEqual(document, {"a": {"b": {"c": 3}}}) + + def test_empty_dict_w_shallow(self): + from google.cloud.firestore_v1._helpers import _EmptyDict + + document = {} + field_path = _make_field_path("a") + value = _EmptyDict + + self._call_fut(document, field_path, value) + + self.assertEqual(document, {"a": {}}) + + def test_empty_dict_w_nested(self): + from google.cloud.firestore_v1._helpers import _EmptyDict + + document = {} + field_path = _make_field_path("a", "b", "c") + value = _EmptyDict + + self._call_fut(document, field_path, value) + + self.assertEqual(document, {"a": {"b": {"c": {}}}}) + + +class Test_get_field_value(unittest.TestCase): + @staticmethod + def _call_fut(document_data, field_path): + from google.cloud.firestore_v1 import _helpers + + return _helpers.get_field_value(document_data, field_path) + + def test_w_empty_path(self): + document = {} + + with self.assertRaises(ValueError): + self._call_fut(document, _make_field_path()) + + def test_miss_shallow(self): + document = {} + + with self.assertRaises(KeyError): + self._call_fut(document, _make_field_path("nonesuch")) + + def test_miss_nested(self): + document = {"a": {"b": {}}} + + with self.assertRaises(KeyError): + self._call_fut(document, _make_field_path("a", "b", "c")) + + def test_hit_shallow(self): + document = {"a": 1} + + self.assertEqual(self._call_fut(document, _make_field_path("a")), 1) + + def test_hit_nested(self): + document = {"a": {"b": {"c": 1}}} + + self.assertEqual(self._call_fut(document, _make_field_path("a", "b", 
"c")), 1) + + +class TestDocumentExtractor(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1 import _helpers + + return _helpers.DocumentExtractor + + def _make_one(self, document_data): + return self._get_target_class()(document_data) + + def test_ctor_w_empty_document(self): + document_data = {} + + inst = self._make_one(document_data) + + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertTrue(inst.empty_document) + self.assertFalse(inst.has_transforms) + self.assertEqual(inst.transform_paths, []) + + def test_ctor_w_delete_field_shallow(self): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + document_data = {"a": DELETE_FIELD} + + inst = self._make_one(document_data) + + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, [_make_field_path("a")]) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + self.assertEqual(inst.transform_paths, []) + + def test_ctor_w_delete_field_nested(self): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + document_data = {"a": {"b": {"c": DELETE_FIELD}}} + + inst = self._make_one(document_data) + + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, [_make_field_path("a", "b", "c")]) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + self.assertEqual(inst.transform_paths, []) + + def test_ctor_w_server_timestamp_shallow(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_data = {"a": SERVER_TIMESTAMP} + + inst = self._make_one(document_data) + + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, [_make_field_path("a")]) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) + + def test_ctor_w_server_timestamp_nested(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} + + inst = self._make_one(document_data) + + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, [_make_field_path("a", "b", "c")]) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) + + 
def test_ctor_w_array_remove_shallow(self): + from google.cloud.firestore_v1.transforms import ArrayRemove + + values = [1, 3, 5] + document_data = {"a": ArrayRemove(values)} + + inst = self._make_one(document_data) + + expected_array_removes = {_make_field_path("a"): values} + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, expected_array_removes) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) + + def test_ctor_w_array_remove_nested(self): + from google.cloud.firestore_v1.transforms import ArrayRemove + + values = [2, 4, 8] + document_data = {"a": {"b": {"c": ArrayRemove(values)}}} + + inst = self._make_one(document_data) + + expected_array_removes = {_make_field_path("a", "b", "c"): values} + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, expected_array_removes) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) + + def test_ctor_w_array_union_shallow(self): + from google.cloud.firestore_v1.transforms import ArrayUnion + + values = [1, 3, 5] + document_data = {"a": ArrayUnion(values)} + + inst = self._make_one(document_data) + + expected_array_unions = {_make_field_path("a"): values} + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, expected_array_unions) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a")]) + + def test_ctor_w_array_union_nested(self): + from google.cloud.firestore_v1.transforms import ArrayUnion + + values = [2, 4, 8] + document_data = {"a": {"b": {"c": ArrayUnion(values)}}} + + inst = self._make_one(document_data) + + expected_array_unions = {_make_field_path("a", "b", "c"): values} + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, []) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, expected_array_unions) + self.assertEqual(inst.set_fields, {}) + self.assertFalse(inst.empty_document) + self.assertTrue(inst.has_transforms) + self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) + + def test_ctor_w_empty_dict_shallow(self): + document_data = {"a": {}} + + inst = self._make_one(document_data) + + expected_field_paths = [_make_field_path("a")] + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, expected_field_paths) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + 
self.assertEqual(inst.set_fields, document_data) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + self.assertEqual(inst.transform_paths, []) + + def test_ctor_w_empty_dict_nested(self): + document_data = {"a": {"b": {"c": {}}}} + + inst = self._make_one(document_data) + + expected_field_paths = [_make_field_path("a", "b", "c")] + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, expected_field_paths) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, document_data) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + self.assertEqual(inst.transform_paths, []) + + def test_ctor_w_normal_value_shallow(self): + document_data = {"b": 1, "a": 2, "c": 3} + + inst = self._make_one(document_data) + + expected_field_paths = [ + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), + ] + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, expected_field_paths) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, document_data) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + + def test_ctor_w_normal_value_nested(self): + document_data = {"b": {"a": {"d": 4, "c": 3}, "e": 7}, "f": 5} + + inst = self._make_one(document_data) + + expected_field_paths = [ + _make_field_path("b", "a", "c"), + _make_field_path("b", "a", "d"), + _make_field_path("b", "e"), + _make_field_path("f"), + ] + self.assertEqual(inst.document_data, document_data) + self.assertEqual(inst.field_paths, expected_field_paths) + self.assertEqual(inst.deleted_fields, []) + self.assertEqual(inst.server_timestamps, []) + self.assertEqual(inst.array_removes, {}) + self.assertEqual(inst.array_unions, {}) + self.assertEqual(inst.set_fields, document_data) + self.assertFalse(inst.empty_document) + self.assertFalse(inst.has_transforms) + + def test_get_update_pb_w_exists_precondition(self): + from google.cloud.firestore_v1.proto import write_pb2 + + document_data = {} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + update_pb = inst.get_update_pb(document_path, exists=False) + + self.assertIsInstance(update_pb, write_pb2.Write) + self.assertEqual(update_pb.update.name, document_path) + self.assertEqual(update_pb.update.fields, document_data) + self.assertTrue(update_pb.HasField("current_document")) + self.assertFalse(update_pb.current_document.exists) + + def test_get_update_pb_wo_exists_precondition(self): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1._helpers import encode_dict + + document_data = {"a": 1} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + update_pb = inst.get_update_pb(document_path) + + self.assertIsInstance(update_pb, write_pb2.Write) + self.assertEqual(update_pb.update.name, document_path) + self.assertEqual(update_pb.update.fields, encode_dict(document_data)) + self.assertFalse(update_pb.HasField("current_document")) + + def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self): + from 
google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + from google.cloud.firestore_v1._helpers import REQUEST_TIME_ENUM + + document_data = {"a": SERVER_TIMESTAMP} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + transform_pb = inst.get_transform_pb(document_path, exists=False) + + self.assertIsInstance(transform_pb, write_pb2.Write) + self.assertEqual(transform_pb.transform.document, document_path) + transforms = transform_pb.transform.field_transforms + self.assertEqual(len(transforms), 1) + transform = transforms[0] + self.assertEqual(transform.field_path, "a") + self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) + self.assertTrue(transform_pb.HasField("current_document")) + self.assertFalse(transform_pb.current_document.exists) + + def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + from google.cloud.firestore_v1._helpers import REQUEST_TIME_ENUM + + document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + transform_pb = inst.get_transform_pb(document_path) + + self.assertIsInstance(transform_pb, write_pb2.Write) + self.assertEqual(transform_pb.transform.document, document_path) + transforms = transform_pb.transform.field_transforms + self.assertEqual(len(transforms), 1) + transform = transforms[0] + self.assertEqual(transform.field_path, "a.b.c") + self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) + self.assertFalse(transform_pb.HasField("current_document")) + + @staticmethod + def _array_value_to_list(array_value): + from google.cloud.firestore_v1._helpers import decode_value + + return [decode_value(element, client=None) for element in array_value.values] + + def test_get_transform_pb_w_array_remove(self): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.transforms import ArrayRemove + + values = [2, 4, 8] + document_data = {"a": {"b": {"c": ArrayRemove(values)}}} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + transform_pb = inst.get_transform_pb(document_path) + + self.assertIsInstance(transform_pb, write_pb2.Write) + self.assertEqual(transform_pb.transform.document, document_path) + transforms = transform_pb.transform.field_transforms + self.assertEqual(len(transforms), 1) + transform = transforms[0] + self.assertEqual(transform.field_path, "a.b.c") + removed = self._array_value_to_list(transform.remove_all_from_array) + self.assertEqual(removed, values) + self.assertFalse(transform_pb.HasField("current_document")) + + def test_get_transform_pb_w_array_union(self): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.transforms import ArrayUnion + + values = [1, 3, 5] + document_data = {"a": {"b": {"c": ArrayUnion(values)}}} + inst = self._make_one(document_data) + document_path = ( + "projects/project-id/databases/(default)/" "documents/document-id" + ) + + transform_pb = inst.get_transform_pb(document_path) + + self.assertIsInstance(transform_pb, write_pb2.Write) + self.assertEqual(transform_pb.transform.document, document_path) + transforms = 
transform_pb.transform.field_transforms + self.assertEqual(len(transforms), 1) + transform = transforms[0] + self.assertEqual(transform.field_path, "a.b.c") + added = self._array_value_to_list(transform.append_missing_elements) + self.assertEqual(added, values) + self.assertFalse(transform_pb.HasField("current_document")) + + +class Test_pbs_for_create(unittest.TestCase): + @staticmethod + def _call_fut(document_path, document_data): + from google.cloud.firestore_v1._helpers import pbs_for_create + + return pbs_for_create(document_path, document_data) + + @staticmethod + def _make_write_w_document(document_path, **data): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1._helpers import encode_dict + from google.cloud.firestore_v1.proto import common_pb2 + + return write_pb2.Write( + update=document_pb2.Document(name=document_path, fields=encode_dict(data)), + current_document=common_pb2.Precondition(exists=False), + ) + + @staticmethod + def _make_write_w_transform(document_path, fields): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.gapic import enums + + server_val = enums.DocumentTransform.FieldTransform.ServerValue + transforms = [ + write_pb2.DocumentTransform.FieldTransform( + field_path=field, set_to_server_value=server_val.REQUEST_TIME + ) + for field in fields + ] + + return write_pb2.Write( + transform=write_pb2.DocumentTransform( + document=document_path, field_transforms=transforms + ) + ) + + def _helper(self, do_transform=False, empty_val=False): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} + + if do_transform: + document_data["butter"] = SERVER_TIMESTAMP + + if empty_val: + document_data["mustard"] = {} + + write_pbs = self._call_fut(document_path, document_data) + + if empty_val: + update_pb = self._make_write_w_document( + document_path, cheese=1.5, crackers=True, mustard={} + ) + else: + update_pb = self._make_write_w_document( + document_path, cheese=1.5, crackers=True + ) + expected_pbs = [update_pb] + + if do_transform: + expected_pbs.append( + self._make_write_w_transform(document_path, fields=["butter"]) + ) + + self.assertEqual(write_pbs, expected_pbs) + + def test_without_transform(self): + self._helper() + + def test_w_transform(self): + self._helper(do_transform=True) + + def test_w_transform_and_empty_value(self): + self._helper(do_transform=True, empty_val=True) + + +class Test_pbs_for_set_no_merge(unittest.TestCase): + @staticmethod + def _call_fut(document_path, document_data): + from google.cloud.firestore_v1 import _helpers + + return _helpers.pbs_for_set_no_merge(document_path, document_data) + + @staticmethod + def _make_write_w_document(document_path, **data): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1._helpers import encode_dict + + return write_pb2.Write( + update=document_pb2.Document(name=document_path, fields=encode_dict(data)) + ) + + @staticmethod + def _make_write_w_transform(document_path, fields): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.gapic import enums + + server_val = enums.DocumentTransform.FieldTransform.ServerValue + transforms = [ + write_pb2.DocumentTransform.FieldTransform( + field_path=field, 
set_to_server_value=server_val.REQUEST_TIME + ) + for field in fields + ] + + return write_pb2.Write( + transform=write_pb2.DocumentTransform( + document=document_path, field_transforms=transforms + ) + ) + + def test_w_empty_document(self): + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {} + + write_pbs = self._call_fut(document_path, document_data) + + update_pb = self._make_write_w_document(document_path) + expected_pbs = [update_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_w_only_server_timestamp(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"butter": SERVER_TIMESTAMP} + + write_pbs = self._call_fut(document_path, document_data) + + update_pb = self._make_write_w_document(document_path) + transform_pb = self._make_write_w_transform(document_path, ["butter"]) + expected_pbs = [update_pb, transform_pb] + self.assertEqual(write_pbs, expected_pbs) + + def _helper(self, do_transform=False, empty_val=False): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} + + if do_transform: + document_data["butter"] = SERVER_TIMESTAMP + + if empty_val: + document_data["mustard"] = {} + + write_pbs = self._call_fut(document_path, document_data) + + if empty_val: + update_pb = self._make_write_w_document( + document_path, cheese=1.5, crackers=True, mustard={} + ) + else: + update_pb = self._make_write_w_document( + document_path, cheese=1.5, crackers=True + ) + expected_pbs = [update_pb] + + if do_transform: + expected_pbs.append( + self._make_write_w_transform(document_path, fields=["butter"]) + ) + + self.assertEqual(write_pbs, expected_pbs) + + def test_defaults(self): + self._helper() + + def test_w_transform(self): + self._helper(do_transform=True) + + def test_w_transform_and_empty_value(self): + # Exercise #5944 + self._helper(do_transform=True, empty_val=True) + + +class TestDocumentExtractorForMerge(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1 import _helpers + + return _helpers.DocumentExtractorForMerge + + def _make_one(self, document_data): + return self._get_target_class()(document_data) + + def test_ctor_w_empty_document(self): + document_data = {} + + inst = self._make_one(document_data) + + self.assertEqual(inst.data_merge, []) + self.assertEqual(inst.transform_merge, []) + self.assertEqual(inst.merge, []) + + def test_apply_merge_all_w_empty_document(self): + document_data = {} + inst = self._make_one(document_data) + + inst.apply_merge(True) + + self.assertEqual(inst.data_merge, []) + self.assertEqual(inst.transform_merge, []) + self.assertEqual(inst.merge, []) + self.assertFalse(inst.has_updates) + + def test_apply_merge_all_w_delete(self): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + document_data = {"write_me": "value", "delete_me": DELETE_FIELD} + inst = self._make_one(document_data) + + inst.apply_merge(True) + + expected_data_merge = [ + _make_field_path("delete_me"), + _make_field_path("write_me"), + ] + self.assertEqual(inst.data_merge, expected_data_merge) + self.assertEqual(inst.transform_merge, []) + self.assertEqual(inst.merge, expected_data_merge) + self.assertTrue(inst.has_updates) + + def test_apply_merge_all_w_server_timestamp(self): + from google.cloud.firestore_v1.transforms import 
SERVER_TIMESTAMP + + document_data = {"write_me": "value", "timestamp": SERVER_TIMESTAMP} + inst = self._make_one(document_data) + + inst.apply_merge(True) + + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("timestamp")] + expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] + self.assertEqual(inst.data_merge, expected_data_merge) + self.assertEqual(inst.transform_merge, expected_transform_merge) + self.assertEqual(inst.merge, expected_merge) + self.assertTrue(inst.has_updates) + + def test_apply_merge_list_fields_w_empty_document(self): + document_data = {} + inst = self._make_one(document_data) + + with self.assertRaises(ValueError): + inst.apply_merge(["nonesuch", "or.this"]) + + def test_apply_merge_list_fields_w_unmerged_delete(self): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + document_data = { + "write_me": "value", + "delete_me": DELETE_FIELD, + "ignore_me": 123, + "unmerged_delete": DELETE_FIELD, + } + inst = self._make_one(document_data) + + with self.assertRaises(ValueError): + inst.apply_merge(["write_me", "delete_me"]) + + def test_apply_merge_list_fields_w_delete(self): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + document_data = { + "write_me": "value", + "delete_me": DELETE_FIELD, + "ignore_me": 123, + } + inst = self._make_one(document_data) + + inst.apply_merge(["write_me", "delete_me"]) + + expected_set_fields = {"write_me": "value"} + expected_deleted_fields = [_make_field_path("delete_me")] + self.assertEqual(inst.set_fields, expected_set_fields) + self.assertEqual(inst.deleted_fields, expected_deleted_fields) + self.assertTrue(inst.has_updates) + + def test_apply_merge_list_fields_w_prefixes(self): + + document_data = {"a": {"b": {"c": 123}}} + inst = self._make_one(document_data) + + with self.assertRaises(ValueError): + inst.apply_merge(["a", "a.b"]) + + def test_apply_merge_list_fields_w_missing_data_string_paths(self): + + document_data = {"write_me": "value", "ignore_me": 123} + inst = self._make_one(document_data) + + with self.assertRaises(ValueError): + inst.apply_merge(["write_me", "nonesuch"]) + + def test_apply_merge_list_fields_w_non_merge_field(self): + + document_data = {"write_me": "value", "ignore_me": 123} + inst = self._make_one(document_data) + + inst.apply_merge([_make_field_path("write_me")]) + + expected_set_fields = {"write_me": "value"} + self.assertEqual(inst.set_fields, expected_set_fields) + self.assertTrue(inst.has_updates) + + def test_apply_merge_list_fields_w_server_timestamp(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_data = { + "write_me": "value", + "timestamp": SERVER_TIMESTAMP, + "ignored_stamp": SERVER_TIMESTAMP, + } + inst = self._make_one(document_data) + + inst.apply_merge([_make_field_path("write_me"), _make_field_path("timestamp")]) + + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("timestamp")] + expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] + self.assertEqual(inst.data_merge, expected_data_merge) + self.assertEqual(inst.transform_merge, expected_transform_merge) + self.assertEqual(inst.merge, expected_merge) + expected_server_timestamps = [_make_field_path("timestamp")] + self.assertEqual(inst.server_timestamps, expected_server_timestamps) + self.assertTrue(inst.has_updates) + + def test_apply_merge_list_fields_w_array_remove(self): + from 
google.cloud.firestore_v1.transforms import ArrayRemove + + values = [2, 4, 8] + document_data = { + "write_me": "value", + "remove_me": ArrayRemove(values), + "ignored_remove_me": ArrayRemove((1, 3, 5)), + } + inst = self._make_one(document_data) + + inst.apply_merge([_make_field_path("write_me"), _make_field_path("remove_me")]) + + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("remove_me")] + expected_merge = [_make_field_path("remove_me"), _make_field_path("write_me")] + self.assertEqual(inst.data_merge, expected_data_merge) + self.assertEqual(inst.transform_merge, expected_transform_merge) + self.assertEqual(inst.merge, expected_merge) + expected_array_removes = {_make_field_path("remove_me"): values} + self.assertEqual(inst.array_removes, expected_array_removes) + self.assertTrue(inst.has_updates) + + def test_apply_merge_list_fields_w_array_union(self): + from google.cloud.firestore_v1.transforms import ArrayUnion + + values = [1, 3, 5] + document_data = { + "write_me": "value", + "union_me": ArrayUnion(values), + "ignored_union_me": ArrayUnion((2, 4, 8)), + } + inst = self._make_one(document_data) + + inst.apply_merge([_make_field_path("write_me"), _make_field_path("union_me")]) + + expected_data_merge = [_make_field_path("write_me")] + expected_transform_merge = [_make_field_path("union_me")] + expected_merge = [_make_field_path("union_me"), _make_field_path("write_me")] + self.assertEqual(inst.data_merge, expected_data_merge) + self.assertEqual(inst.transform_merge, expected_transform_merge) + self.assertEqual(inst.merge, expected_merge) + expected_array_unions = {_make_field_path("union_me"): values} + self.assertEqual(inst.array_unions, expected_array_unions) + self.assertTrue(inst.has_updates) + + +class Test_pbs_for_set_with_merge(unittest.TestCase): + @staticmethod + def _call_fut(document_path, document_data, merge): + from google.cloud.firestore_v1 import _helpers + + return _helpers.pbs_for_set_with_merge( + document_path, document_data, merge=merge + ) + + @staticmethod + def _make_write_w_document(document_path, **data): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1._helpers import encode_dict + + return write_pb2.Write( + update=document_pb2.Document(name=document_path, fields=encode_dict(data)) + ) + + @staticmethod + def _make_write_w_transform(document_path, fields): + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.gapic import enums + + server_val = enums.DocumentTransform.FieldTransform.ServerValue + transforms = [ + write_pb2.DocumentTransform.FieldTransform( + field_path=field, set_to_server_value=server_val.REQUEST_TIME + ) + for field in fields + ] + + return write_pb2.Write( + transform=write_pb2.DocumentTransform( + document=document_path, field_transforms=transforms + ) + ) + + @staticmethod + def _update_document_mask(update_pb, field_paths): + from google.cloud.firestore_v1.proto import common_pb2 + + update_pb.update_mask.CopyFrom( + common_pb2.DocumentMask(field_paths=sorted(field_paths)) + ) + + def test_with_merge_true_wo_transform(self): + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} + + write_pbs = self._call_fut(document_path, document_data, merge=True) + + update_pb = self._make_write_w_document(document_path, **document_data) + self._update_document_mask(update_pb, 
field_paths=sorted(document_data)) + expected_pbs = [update_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_with_merge_field_wo_transform(self): + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + document_data = {"cheese": 1.5, "crackers": True} + + write_pbs = self._call_fut(document_path, document_data, merge=["cheese"]) + + update_pb = self._make_write_w_document( + document_path, cheese=document_data["cheese"] + ) + self._update_document_mask(update_pb, field_paths=["cheese"]) + expected_pbs = [update_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_with_merge_true_w_transform(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} + document_data = update_data.copy() + document_data["butter"] = SERVER_TIMESTAMP + + write_pbs = self._call_fut(document_path, document_data, merge=True) + + update_pb = self._make_write_w_document(document_path, **update_data) + self._update_document_mask(update_pb, field_paths=sorted(update_data)) + transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) + expected_pbs = [update_pb, transform_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_with_merge_field_w_transform(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} + document_data = update_data.copy() + document_data["butter"] = SERVER_TIMESTAMP + + write_pbs = self._call_fut( + document_path, document_data, merge=["cheese", "butter"] + ) + + update_pb = self._make_write_w_document( + document_path, cheese=document_data["cheese"] + ) + self._update_document_mask(update_pb, ["cheese"]) + transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) + expected_pbs = [update_pb, transform_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_with_merge_field_w_transform_masking_simple(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} + document_data = update_data.copy() + document_data["butter"] = {"pecan": SERVER_TIMESTAMP} + + write_pbs = self._call_fut(document_path, document_data, merge=["butter.pecan"]) + + update_pb = self._make_write_w_document(document_path) + transform_pb = self._make_write_w_transform( + document_path, fields=["butter.pecan"] + ) + expected_pbs = [update_pb, transform_pb] + self.assertEqual(write_pbs, expected_pbs) + + def test_with_merge_field_w_transform_parent(self): + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + + document_path = _make_ref_string(u"little", u"town", u"of", u"ham") + update_data = {"cheese": 1.5, "crackers": True} + document_data = update_data.copy() + document_data["butter"] = {"popcorn": "yum", "pecan": SERVER_TIMESTAMP} + + write_pbs = self._call_fut( + document_path, document_data, merge=["cheese", "butter"] + ) + + update_pb = self._make_write_w_document( + document_path, cheese=update_data["cheese"], butter={"popcorn": "yum"} + ) + self._update_document_mask(update_pb, ["cheese", "butter"]) + transform_pb = self._make_write_w_transform( + document_path, fields=["butter.pecan"] + ) + expected_pbs = [update_pb, transform_pb] + self.assertEqual(write_pbs, expected_pbs) + + +class 
TestDocumentExtractorForUpdate(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1 import _helpers + + return _helpers.DocumentExtractorForUpdate + + def _make_one(self, document_data): + return self._get_target_class()(document_data) + + def test_ctor_w_empty_document(self): + document_data = {} + + inst = self._make_one(document_data) + self.assertEqual(inst.top_level_paths, []) + + def test_ctor_w_simple_keys(self): + document_data = {"a": 1, "b": 2, "c": 3} + + expected_paths = [ + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), + ] + inst = self._make_one(document_data) + self.assertEqual(inst.top_level_paths, expected_paths) + + def test_ctor_w_nested_keys(self): + document_data = {"a": {"d": {"e": 1}}, "b": {"f": 7}, "c": 3} + + expected_paths = [ + _make_field_path("a"), + _make_field_path("b"), + _make_field_path("c"), + ] + inst = self._make_one(document_data) + self.assertEqual(inst.top_level_paths, expected_paths) + + def test_ctor_w_dotted_keys(self): + document_data = {"a.d.e": 1, "b.f": 7, "c": 3} + + expected_paths = [ + _make_field_path("a", "d", "e"), + _make_field_path("b", "f"), + _make_field_path("c"), + ] + inst = self._make_one(document_data) + self.assertEqual(inst.top_level_paths, expected_paths) + + def test_ctor_w_nested_dotted_keys(self): + document_data = {"a.d.e": 1, "b.f": {"h.i": 9}, "c": 3} + + expected_paths = [ + _make_field_path("a", "d", "e"), + _make_field_path("b", "f"), + _make_field_path("c"), + ] + expected_set_fields = {"a": {"d": {"e": 1}}, "b": {"f": {"h.i": 9}}, "c": 3} + inst = self._make_one(document_data) + self.assertEqual(inst.top_level_paths, expected_paths) + self.assertEqual(inst.set_fields, expected_set_fields) + + +class Test_pbs_for_update(unittest.TestCase): + @staticmethod + def _call_fut(document_path, field_updates, option): + from google.cloud.firestore_v1._helpers import pbs_for_update + + return pbs_for_update(document_path, field_updates, option) + + def _helper(self, option=None, do_transform=False, **write_kwargs): + from google.cloud.firestore_v1 import _helpers + from google.cloud.firestore_v1.field_path import FieldPath + from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + document_path = _make_ref_string(u"toy", u"car", u"onion", u"garlic") + field_path1 = "bitez.yum" + value = b"\x00\x01" + field_path2 = "blog.internet" + + field_updates = {field_path1: value} + if do_transform: + field_updates[field_path2] = SERVER_TIMESTAMP + + write_pbs = self._call_fut(document_path, field_updates, option) + + map_pb = document_pb2.MapValue(fields={"yum": _value_pb(bytes_value=value)}) + + if do_transform: + field_paths = [field_path1, "blog"] + else: + field_paths = [field_path1] + + expected_update_pb = write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields={"bitez": _value_pb(map_value=map_pb)} + ), + update_mask=common_pb2.DocumentMask(field_paths=field_paths), + **write_kwargs + ) + if isinstance(option, _helpers.ExistsOption): + precondition = common_pb2.Precondition(exists=False) + expected_update_pb.current_document.CopyFrom(precondition) + expected_pbs = [expected_update_pb] + if do_transform: + transform_paths = FieldPath.from_string(field_path2) + server_val = 
enums.DocumentTransform.FieldTransform.ServerValue + expected_transform_pb = write_pb2.Write( + transform=write_pb2.DocumentTransform( + document=document_path, + field_transforms=[ + write_pb2.DocumentTransform.FieldTransform( + field_path=transform_paths.to_api_repr(), + set_to_server_value=server_val.REQUEST_TIME, + ) + ], + ) + ) + expected_pbs.append(expected_transform_pb) + self.assertEqual(write_pbs, expected_pbs) + + def test_without_option(self): + from google.cloud.firestore_v1.proto import common_pb2 + + precondition = common_pb2.Precondition(exists=True) + self._helper(current_document=precondition) + + def test_with_exists_option(self): + from google.cloud.firestore_v1.client import _helpers + + option = _helpers.ExistsOption(False) + self._helper(option=option) + + def test_update_and_transform(self): + from google.cloud.firestore_v1.proto import common_pb2 + + precondition = common_pb2.Precondition(exists=True) + self._helper(current_document=precondition, do_transform=True) + + +class Test_pb_for_delete(unittest.TestCase): + @staticmethod + def _call_fut(document_path, option): + from google.cloud.firestore_v1._helpers import pb_for_delete + + return pb_for_delete(document_path, option) + + def _helper(self, option=None, **write_kwargs): + from google.cloud.firestore_v1.proto import write_pb2 + + document_path = _make_ref_string(u"chicken", u"philly", u"one", u"two") + write_pb = self._call_fut(document_path, option) + + expected_pb = write_pb2.Write(delete=document_path, **write_kwargs) + self.assertEqual(write_pb, expected_pb) + + def test_without_option(self): + self._helper() + + def test_with_option(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1 import _helpers + + update_time = timestamp_pb2.Timestamp(seconds=1309700594, nanos=822211297) + option = _helpers.LastUpdateOption(update_time) + precondition = common_pb2.Precondition(update_time=update_time) + self._helper(option=option, current_document=precondition) + + +class Test_get_transaction_id(unittest.TestCase): + @staticmethod + def _call_fut(transaction, **kwargs): + from google.cloud.firestore_v1._helpers import get_transaction_id + + return get_transaction_id(transaction, **kwargs) + + def test_no_transaction(self): + ret_val = self._call_fut(None) + self.assertIsNone(ret_val) + + def test_invalid_transaction(self): + from google.cloud.firestore_v1.transaction import Transaction + + transaction = Transaction(mock.sentinel.client) + self.assertFalse(transaction.in_progress) + with self.assertRaises(ValueError): + self._call_fut(transaction) + + def test_after_writes_not_allowed(self): + from google.cloud.firestore_v1._helpers import ReadAfterWriteError + from google.cloud.firestore_v1.transaction import Transaction + + transaction = Transaction(mock.sentinel.client) + transaction._id = b"under-hook" + transaction._write_pbs.append(mock.sentinel.write) + + with self.assertRaises(ReadAfterWriteError): + self._call_fut(transaction) + + def test_after_writes_allowed(self): + from google.cloud.firestore_v1.transaction import Transaction + + transaction = Transaction(mock.sentinel.client) + txn_id = b"we-are-0fine" + transaction._id = txn_id + transaction._write_pbs.append(mock.sentinel.write) + + ret_val = self._call_fut(transaction, read_operation=False) + self.assertEqual(ret_val, txn_id) + + def test_good_transaction(self): + from google.cloud.firestore_v1.transaction import Transaction + + transaction = 
Transaction(mock.sentinel.client) + txn_id = b"doubt-it" + transaction._id = txn_id + self.assertTrue(transaction.in_progress) + + self.assertEqual(self._call_fut(transaction), txn_id) + + +class Test_metadata_with_prefix(unittest.TestCase): + @staticmethod + def _call_fut(database_string): + from google.cloud.firestore_v1._helpers import metadata_with_prefix + + return metadata_with_prefix(database_string) + + def test_it(self): + database_string = u"projects/prahj/databases/dee-bee" + metadata = self._call_fut(database_string) + + self.assertEqual(metadata, [("google-cloud-resource-prefix", database_string)]) + + +class TestWriteOption(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1._helpers import WriteOption + + return WriteOption + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_modify_write(self): + option = self._make_one() + with self.assertRaises(NotImplementedError): + option.modify_write(None) + + +class TestLastUpdateOption(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1._helpers import LastUpdateOption + + return LastUpdateOption + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + option = self._make_one(mock.sentinel.timestamp) + self.assertIs(option._last_update_time, mock.sentinel.timestamp) + + def test___eq___different_type(self): + option = self._make_one(mock.sentinel.timestamp) + other = object() + self.assertFalse(option == other) + + def test___eq___different_timestamp(self): + option = self._make_one(mock.sentinel.timestamp) + other = self._make_one(mock.sentinel.other_timestamp) + self.assertFalse(option == other) + + def test___eq___same_timestamp(self): + option = self._make_one(mock.sentinel.timestamp) + other = self._make_one(mock.sentinel.timestamp) + self.assertTrue(option == other) + + def test_modify_write_update_time(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + timestamp_pb = timestamp_pb2.Timestamp(seconds=683893592, nanos=229362000) + option = self._make_one(timestamp_pb) + write_pb = write_pb2.Write() + ret_val = option.modify_write(write_pb) + + self.assertIsNone(ret_val) + expected_doc = common_pb2.Precondition(update_time=timestamp_pb) + self.assertEqual(write_pb.current_document, expected_doc) + + +class TestExistsOption(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1._helpers import ExistsOption + + return ExistsOption + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + option = self._make_one(mock.sentinel.totes_bool) + self.assertIs(option._exists, mock.sentinel.totes_bool) + + def test___eq___different_type(self): + option = self._make_one(mock.sentinel.timestamp) + other = object() + self.assertFalse(option == other) + + def test___eq___different_exists(self): + option = self._make_one(True) + other = self._make_one(False) + self.assertFalse(option == other) + + def test___eq___same_exists(self): + option = self._make_one(True) + other = self._make_one(True) + self.assertTrue(option == other) + + def test_modify_write(self): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import 
write_pb2 + + for exists in (True, False): + option = self._make_one(exists) + write_pb = write_pb2.Write() + ret_val = option.modify_write(write_pb) + + self.assertIsNone(ret_val) + expected_doc = common_pb2.Precondition(exists=exists) + self.assertEqual(write_pb.current_document, expected_doc) + + +def _value_pb(**kwargs): + from google.cloud.firestore_v1.proto.document_pb2 import Value + + return Value(**kwargs) + + +def _make_ref_string(project, database, *path): + from google.cloud.firestore_v1 import _helpers + + doc_rel_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path) + return u"projects/{}/databases/{}/documents/{}".format( + project, database, doc_rel_path + ) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(project="quark"): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project=project, credentials=credentials) + + +def _make_field_path(*fields): + from google.cloud.firestore_v1 import field_path + + return field_path.FieldPath(*fields) diff --git a/firestore/tests/unit/v1/test_batch.py b/firestore/tests/unit/v1/test_batch.py new file mode 100644 index 000000000000..08421d6039dd --- /dev/null +++ b/firestore/tests/unit/v1/test_batch.py @@ -0,0 +1,271 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import mock + + +class TestWriteBatch(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.batch import WriteBatch + + return WriteBatch + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + batch = self._make_one(mock.sentinel.client) + self.assertIs(batch._client, mock.sentinel.client) + self.assertEqual(batch._write_pbs, []) + self.assertIsNone(batch.write_results) + self.assertIsNone(batch.commit_time) + + def test__add_write_pbs(self): + batch = self._make_one(mock.sentinel.client) + self.assertEqual(batch._write_pbs, []) + batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2]) + self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2]) + + def test_create(self): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + client = _make_client() + batch = self._make_one(client) + self.assertEqual(batch._write_pbs, []) + + reference = client.document("this", "one") + document_data = {"a": 10, "b": 2.5} + ret_val = batch.create(reference, document_data) + self.assertIsNone(ret_val) + new_write_pb = write_pb2.Write( + update=document_pb2.Document( + name=reference._document_path, + fields={ + "a": _value_pb(integer_value=document_data["a"]), + "b": _value_pb(double_value=document_data["b"]), + }, + ), + current_document=common_pb2.Precondition(exists=False), + ) + self.assertEqual(batch._write_pbs, [new_write_pb]) + + def test_set(self): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + client = _make_client() + batch = self._make_one(client) + self.assertEqual(batch._write_pbs, []) + + reference = client.document("another", "one") + field = "zapzap" + value = u"meadows and flowers" + document_data = {field: value} + ret_val = batch.set(reference, document_data) + self.assertIsNone(ret_val) + new_write_pb = write_pb2.Write( + update=document_pb2.Document( + name=reference._document_path, + fields={field: _value_pb(string_value=value)}, + ) + ) + self.assertEqual(batch._write_pbs, [new_write_pb]) + + def test_set_merge(self): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + client = _make_client() + batch = self._make_one(client) + self.assertEqual(batch._write_pbs, []) + + reference = client.document("another", "one") + field = "zapzap" + value = u"meadows and flowers" + document_data = {field: value} + ret_val = batch.set(reference, document_data, merge=True) + self.assertIsNone(ret_val) + new_write_pb = write_pb2.Write( + update=document_pb2.Document( + name=reference._document_path, + fields={field: _value_pb(string_value=value)}, + ), + update_mask={"field_paths": [field]}, + ) + self.assertEqual(batch._write_pbs, [new_write_pb]) + + def test_update(self): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + client = _make_client() + batch = self._make_one(client) + self.assertEqual(batch._write_pbs, []) + + reference = client.document("cats", "cradle") + field_path = "head.foot" + value = u"knees toes shoulders" + field_updates = {field_path: value} + + ret_val = batch.update(reference, field_updates) + 
self.assertIsNone(ret_val) + + map_pb = document_pb2.MapValue(fields={"foot": _value_pb(string_value=value)}) + new_write_pb = write_pb2.Write( + update=document_pb2.Document( + name=reference._document_path, + fields={"head": _value_pb(map_value=map_pb)}, + ), + update_mask=common_pb2.DocumentMask(field_paths=[field_path]), + current_document=common_pb2.Precondition(exists=True), + ) + self.assertEqual(batch._write_pbs, [new_write_pb]) + + def test_delete(self): + from google.cloud.firestore_v1.proto import write_pb2 + + client = _make_client() + batch = self._make_one(client) + self.assertEqual(batch._write_pbs, []) + + reference = client.document("early", "mornin", "dawn", "now") + ret_val = batch.delete(reference) + self.assertIsNone(ret_val) + new_write_pb = write_pb2.Write(delete=reference._document_path) + self.assertEqual(batch._write_pbs, [new_write_pb]) + + def test_commit(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.Mock(spec=["commit"]) + timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798) + commit_response = firestore_pb2.CommitResponse( + write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()], + commit_time=timestamp, + ) + firestore_api.commit.return_value = commit_response + + # Attach the fake GAPIC to a real client. + client = _make_client("grand") + client._firestore_api_internal = firestore_api + + # Actually make a batch with some mutations and call commit(). + batch = self._make_one(client) + document1 = client.document("a", "b") + batch.create(document1, {"ten": 10, "buck": u"ets"}) + document2 = client.document("c", "d", "e", "f") + batch.delete(document2) + write_pbs = batch._write_pbs[::] + + write_results = batch.commit() + self.assertEqual(write_results, list(commit_response.write_results)) + self.assertEqual(batch.write_results, write_results) + self.assertEqual(batch.commit_time, timestamp) + # Make sure batch has no more "changes". + self.assertEqual(batch._write_pbs, []) + + # Verify the mocks. + firestore_api.commit.assert_called_once_with( + client._database_string, + write_pbs, + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_as_context_mgr_wo_error(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + firestore_api = mock.Mock(spec=["commit"]) + timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798) + commit_response = firestore_pb2.CommitResponse( + write_results=[write_pb2.WriteResult(), write_pb2.WriteResult()], + commit_time=timestamp, + ) + firestore_api.commit.return_value = commit_response + client = _make_client() + client._firestore_api_internal = firestore_api + batch = self._make_one(client) + document1 = client.document("a", "b") + document2 = client.document("c", "d", "e", "f") + + with batch as ctx_mgr: + self.assertIs(ctx_mgr, batch) + ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"}) + ctx_mgr.delete(document2) + write_pbs = batch._write_pbs[::] + + self.assertEqual(batch.write_results, list(commit_response.write_results)) + self.assertEqual(batch.commit_time, timestamp) + # Make sure batch has no more "changes". + self.assertEqual(batch._write_pbs, []) + + # Verify the mocks. 
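+        # Leaving the ``with`` block without an error should have issued exactly one commit RPC.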
+ firestore_api.commit.assert_called_once_with( + client._database_string, + write_pbs, + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_as_context_mgr_w_error(self): + firestore_api = mock.Mock(spec=["commit"]) + client = _make_client() + client._firestore_api_internal = firestore_api + batch = self._make_one(client) + document1 = client.document("a", "b") + document2 = client.document("c", "d", "e", "f") + + with self.assertRaises(RuntimeError): + with batch as ctx_mgr: + ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"}) + ctx_mgr.delete(document2) + raise RuntimeError("testing") + + self.assertIsNone(batch.write_results) + self.assertIsNone(batch.commit_time) + # batch still has its changes + self.assertEqual(len(batch._write_pbs), 2) + + firestore_api.commit.assert_not_called() + + +def _value_pb(**kwargs): + from google.cloud.firestore_v1.proto.document_pb2 import Value + + return Value(**kwargs) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(project="seventy-nine"): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project=project, credentials=credentials) diff --git a/firestore/tests/unit/v1/test_client.py b/firestore/tests/unit/v1/test_client.py new file mode 100644 index 000000000000..968d13487249 --- /dev/null +++ b/firestore/tests/unit/v1/test_client.py @@ -0,0 +1,629 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import types +import unittest + +import mock + + +class TestClient(unittest.TestCase): + + PROJECT = "my-prahjekt" + + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.client import Client + + return Client + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def _make_default_one(self): + credentials = _make_credentials() + return self._make_one(project=self.PROJECT, credentials=credentials) + + def test_constructor(self): + from google.cloud.firestore_v1.client import DEFAULT_DATABASE + + credentials = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=credentials) + self.assertEqual(client.project, self.PROJECT) + self.assertEqual(client._credentials, credentials) + self.assertEqual(client._database, DEFAULT_DATABASE) + + def test_constructor_explicit(self): + credentials = _make_credentials() + database = "now-db" + client = self._make_one( + project=self.PROJECT, credentials=credentials, database=database + ) + self.assertEqual(client.project, self.PROJECT) + self.assertEqual(client._credentials, credentials) + self.assertEqual(client._database, database) + + @mock.patch( + "google.cloud.firestore_v1.gapic.firestore_client." 
"FirestoreClient", + autospec=True, + return_value=mock.sentinel.firestore_api, + ) + def test__firestore_api_property(self, mock_client): + client = self._make_default_one() + self.assertIsNone(client._firestore_api_internal) + firestore_api = client._firestore_api + self.assertIs(firestore_api, mock_client.return_value) + self.assertIs(firestore_api, client._firestore_api_internal) + mock_client.assert_called_once_with(credentials=client._credentials) + + # Call again to show that it is cached, but call count is still 1. + self.assertIs(client._firestore_api, mock_client.return_value) + self.assertEqual(mock_client.call_count, 1) + + def test___database_string_property(self): + credentials = _make_credentials() + database = "cheeeeez" + client = self._make_one( + project=self.PROJECT, credentials=credentials, database=database + ) + self.assertIsNone(client._database_string_internal) + database_string = client._database_string + expected = "projects/{}/databases/{}".format(client.project, client._database) + self.assertEqual(database_string, expected) + self.assertIs(database_string, client._database_string_internal) + + # Swap it out with a unique value to verify it is cached. + client._database_string_internal = mock.sentinel.cached + self.assertIs(client._database_string, mock.sentinel.cached) + + def test___rpc_metadata_property(self): + credentials = _make_credentials() + database = "quanta" + client = self._make_one( + project=self.PROJECT, credentials=credentials, database=database + ) + + self.assertEqual( + client._rpc_metadata, + [("google-cloud-resource-prefix", client._database_string)], + ) + + def test_collection_factory(self): + from google.cloud.firestore_v1.collection import CollectionReference + + collection_id = "users" + client = self._make_default_one() + collection = client.collection(collection_id) + + self.assertEqual(collection._path, (collection_id,)) + self.assertIs(collection._client, client) + self.assertIsInstance(collection, CollectionReference) + + def test_collection_factory_nested(self): + from google.cloud.firestore_v1.collection import CollectionReference + + client = self._make_default_one() + parts = ("users", "alovelace", "beep") + collection_path = "/".join(parts) + collection1 = client.collection(collection_path) + + self.assertEqual(collection1._path, parts) + self.assertIs(collection1._client, client) + self.assertIsInstance(collection1, CollectionReference) + + # Make sure using segments gives the same result. + collection2 = client.collection(*parts) + self.assertEqual(collection2._path, parts) + self.assertIs(collection2._client, client) + self.assertIsInstance(collection2, CollectionReference) + + def test_document_factory(self): + from google.cloud.firestore_v1.document import DocumentReference + + parts = ("rooms", "roomA") + client = self._make_default_one() + doc_path = "/".join(parts) + document1 = client.document(doc_path) + + self.assertEqual(document1._path, parts) + self.assertIs(document1._client, client) + self.assertIsInstance(document1, DocumentReference) + + # Make sure using segments gives the same result. 
+ document2 = client.document(*parts) + self.assertEqual(document2._path, parts) + self.assertIs(document2._client, client) + self.assertIsInstance(document2, DocumentReference) + + def test_document_factory_nested(self): + from google.cloud.firestore_v1.document import DocumentReference + + client = self._make_default_one() + parts = ("rooms", "roomA", "shoes", "dressy") + doc_path = "/".join(parts) + document1 = client.document(doc_path) + + self.assertEqual(document1._path, parts) + self.assertIs(document1._client, client) + self.assertIsInstance(document1, DocumentReference) + + # Make sure using segments gives the same result. + document2 = client.document(*parts) + self.assertEqual(document2._path, parts) + self.assertIs(document2._client, client) + self.assertIsInstance(document2, DocumentReference) + + def test_field_path(self): + klass = self._get_target_class() + self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c") + + def test_write_option_last_update(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1._helpers import LastUpdateOption + + timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097) + + klass = self._get_target_class() + option = klass.write_option(last_update_time=timestamp) + self.assertIsInstance(option, LastUpdateOption) + self.assertEqual(option._last_update_time, timestamp) + + def test_write_option_exists(self): + from google.cloud.firestore_v1._helpers import ExistsOption + + klass = self._get_target_class() + + option1 = klass.write_option(exists=False) + self.assertIsInstance(option1, ExistsOption) + self.assertFalse(option1._exists) + + option2 = klass.write_option(exists=True) + self.assertIsInstance(option2, ExistsOption) + self.assertTrue(option2._exists) + + def test_write_open_neither_arg(self): + from google.cloud.firestore_v1.client import _BAD_OPTION_ERR + + klass = self._get_target_class() + with self.assertRaises(TypeError) as exc_info: + klass.write_option() + + self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,)) + + def test_write_multiple_args(self): + from google.cloud.firestore_v1.client import _BAD_OPTION_ERR + + klass = self._get_target_class() + with self.assertRaises(TypeError) as exc_info: + klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp) + + self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,)) + + def test_write_bad_arg(self): + from google.cloud.firestore_v1.client import _BAD_OPTION_ERR + + klass = self._get_target_class() + with self.assertRaises(TypeError) as exc_info: + klass.write_option(spinach="popeye") + + extra = "{!r} was provided".format("spinach") + self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra)) + + def test_collections(self): + from google.api_core.page_iterator import Iterator + from google.api_core.page_iterator import Page + from google.cloud.firestore_v1.collection import CollectionReference + + collection_ids = ["users", "projects"] + client = self._make_default_one() + firestore_api = mock.Mock(spec=["list_collection_ids"]) + client._firestore_api_internal = firestore_api + + class _Iterator(Iterator): + def __init__(self, pages): + super(_Iterator, self).__init__(client=None) + self._pages = pages + + def _next_page(self): + if self._pages: + page, self._pages = self._pages[0], self._pages[1:] + return Page(self, page, self.item_to_value) + + iterator = _Iterator(pages=[collection_ids]) + firestore_api.list_collection_ids.return_value = iterator + + collections = list(client.collections()) + + 
self.assertEqual(len(collections), len(collection_ids)) + for collection, collection_id in zip(collections, collection_ids): + self.assertIsInstance(collection, CollectionReference) + self.assertEqual(collection.parent, None) + self.assertEqual(collection.id, collection_id) + + firestore_api.list_collection_ids.assert_called_once_with( + client._database_string, metadata=client._rpc_metadata + ) + + def _get_all_helper(self, client, references, document_pbs, **kwargs): + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["batch_get_documents"]) + response_iterator = iter(document_pbs) + firestore_api.batch_get_documents.return_value = response_iterator + + # Attach the fake GAPIC to a real client. + client._firestore_api_internal = firestore_api + + # Actually call get_all(). + snapshots = client.get_all(references, **kwargs) + self.assertIsInstance(snapshots, types.GeneratorType) + + return list(snapshots) + + def _info_for_get_all(self, data1, data2): + client = self._make_default_one() + document1 = client.document("pineapple", "lamp1") + document2 = client.document("pineapple", "lamp2") + + # Make response protobufs. + document_pb1, read_time = _doc_get_info(document1._document_path, data1) + response1 = _make_batch_response(found=document_pb1, read_time=read_time) + + document_pb2, read_time = _doc_get_info(document2._document_path, data2) + response2 = _make_batch_response(found=document_pb2, read_time=read_time) + + return client, document1, document2, response1, response2 + + def test_get_all(self): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.document import DocumentSnapshot + + data1 = {"a": u"cheese"} + data2 = {"b": True, "c": 18} + info = self._info_for_get_all(data1, data2) + client, document1, document2, response1, response2 = info + + # Exercise the mocked ``batch_get_documents``. + field_paths = ["a", "b"] + snapshots = self._get_all_helper( + client, + [document1, document2], + [response1, response2], + field_paths=field_paths, + ) + self.assertEqual(len(snapshots), 2) + + snapshot1 = snapshots[0] + self.assertIsInstance(snapshot1, DocumentSnapshot) + self.assertIs(snapshot1._reference, document1) + self.assertEqual(snapshot1._data, data1) + + snapshot2 = snapshots[1] + self.assertIsInstance(snapshot2, DocumentSnapshot) + self.assertIs(snapshot2._reference, document2) + self.assertEqual(snapshot2._data, data2) + + # Verify the call to the mock. + doc_paths = [document1._document_path, document2._document_path] + mask = common_pb2.DocumentMask(field_paths=field_paths) + client._firestore_api.batch_get_documents.assert_called_once_with( + client._database_string, + doc_paths, + mask, + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_get_all_with_transaction(self): + from google.cloud.firestore_v1.document import DocumentSnapshot + + data = {"so-much": 484} + info = self._info_for_get_all(data, {}) + client, document, _, response, _ = info + transaction = client.transaction() + txn_id = b"the-man-is-non-stop" + transaction._id = txn_id + + # Exercise the mocked ``batch_get_documents``. + snapshots = self._get_all_helper( + client, [document], [response], transaction=transaction + ) + self.assertEqual(len(snapshots), 1) + + snapshot = snapshots[0] + self.assertIsInstance(snapshot, DocumentSnapshot) + self.assertIs(snapshot._reference, document) + self.assertEqual(snapshot._data, data) + + # Verify the call to the mock. 
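+        # No ``field_paths`` were passed, so the document mask argument is ``None``.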
+ doc_paths = [document._document_path] + client._firestore_api.batch_get_documents.assert_called_once_with( + client._database_string, + doc_paths, + None, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + def test_get_all_unknown_result(self): + from google.cloud.firestore_v1.client import _BAD_DOC_TEMPLATE + + info = self._info_for_get_all({"z": 28.5}, {}) + client, document, _, _, response = info + + # Exercise the mocked ``batch_get_documents``. + with self.assertRaises(ValueError) as exc_info: + self._get_all_helper(client, [document], [response]) + + err_msg = _BAD_DOC_TEMPLATE.format(response.found.name) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + # Verify the call to the mock. + doc_paths = [document._document_path] + client._firestore_api.batch_get_documents.assert_called_once_with( + client._database_string, + doc_paths, + None, + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_get_all_wrong_order(self): + from google.cloud.firestore_v1.document import DocumentSnapshot + + data1 = {"up": 10} + data2 = {"down": -10} + info = self._info_for_get_all(data1, data2) + client, document1, document2, response1, response2 = info + document3 = client.document("pineapple", "lamp3") + response3 = _make_batch_response(missing=document3._document_path) + + # Exercise the mocked ``batch_get_documents``. + snapshots = self._get_all_helper( + client, [document1, document2, document3], [response2, response1, response3] + ) + + self.assertEqual(len(snapshots), 3) + + snapshot1 = snapshots[0] + self.assertIsInstance(snapshot1, DocumentSnapshot) + self.assertIs(snapshot1._reference, document2) + self.assertEqual(snapshot1._data, data2) + + snapshot2 = snapshots[1] + self.assertIsInstance(snapshot2, DocumentSnapshot) + self.assertIs(snapshot2._reference, document1) + self.assertEqual(snapshot2._data, data1) + + self.assertFalse(snapshots[2].exists) + + # Verify the call to the mock. 
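+        # All three references go out in a single ``batch_get_documents`` call, even though the responses came back out of order.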
+ doc_paths = [ + document1._document_path, + document2._document_path, + document3._document_path, + ] + client._firestore_api.batch_get_documents.assert_called_once_with( + client._database_string, + doc_paths, + None, + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_batch(self): + from google.cloud.firestore_v1.batch import WriteBatch + + client = self._make_default_one() + batch = client.batch() + self.assertIsInstance(batch, WriteBatch) + self.assertIs(batch._client, client) + self.assertEqual(batch._write_pbs, []) + + def test_transaction(self): + from google.cloud.firestore_v1.transaction import Transaction + + client = self._make_default_one() + transaction = client.transaction(max_attempts=3, read_only=True) + self.assertIsInstance(transaction, Transaction) + self.assertEqual(transaction._write_pbs, []) + self.assertEqual(transaction._max_attempts, 3) + self.assertTrue(transaction._read_only) + self.assertIsNone(transaction._id) + + +class Test__reference_info(unittest.TestCase): + @staticmethod + def _call_fut(references): + from google.cloud.firestore_v1.client import _reference_info + + return _reference_info(references) + + def test_it(self): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + client = Client(project="hi-projject", credentials=credentials) + + reference1 = client.document("a", "b") + reference2 = client.document("a", "b", "c", "d") + reference3 = client.document("a", "b") + reference4 = client.document("f", "g") + + doc_path1 = reference1._document_path + doc_path2 = reference2._document_path + doc_path3 = reference3._document_path + doc_path4 = reference4._document_path + self.assertEqual(doc_path1, doc_path3) + + document_paths, reference_map = self._call_fut( + [reference1, reference2, reference3, reference4] + ) + self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4]) + # reference3 over-rides reference1. 
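+        # ``doc_path1`` and ``doc_path3`` are identical, so only the later reference appears in the map.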
+ expected_map = { + doc_path2: reference2, + doc_path3: reference3, + doc_path4: reference4, + } + self.assertEqual(reference_map, expected_map) + + +class Test__get_reference(unittest.TestCase): + @staticmethod + def _call_fut(document_path, reference_map): + from google.cloud.firestore_v1.client import _get_reference + + return _get_reference(document_path, reference_map) + + def test_success(self): + doc_path = "a/b/c" + reference_map = {doc_path: mock.sentinel.reference} + self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference) + + def test_failure(self): + from google.cloud.firestore_v1.client import _BAD_DOC_TEMPLATE + + doc_path = "1/888/call-now" + with self.assertRaises(ValueError) as exc_info: + self._call_fut(doc_path, {}) + + err_msg = _BAD_DOC_TEMPLATE.format(doc_path) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + +class Test__parse_batch_get(unittest.TestCase): + @staticmethod + def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client): + from google.cloud.firestore_v1.client import _parse_batch_get + + return _parse_batch_get(get_doc_response, reference_map, client) + + @staticmethod + def _dummy_ref_string(): + from google.cloud.firestore_v1.client import DEFAULT_DATABASE + + project = u"bazzzz" + collection_id = u"fizz" + document_id = u"buzz" + return u"projects/{}/databases/{}/documents/{}/{}".format( + project, DEFAULT_DATABASE, collection_id, document_id + ) + + def test_found(self): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.firestore_v1.document import DocumentSnapshot + + now = datetime.datetime.utcnow() + read_time = _datetime_to_pb_timestamp(now) + delta = datetime.timedelta(seconds=100) + update_time = _datetime_to_pb_timestamp(now - delta) + create_time = _datetime_to_pb_timestamp(now - 2 * delta) + + ref_string = self._dummy_ref_string() + document_pb = document_pb2.Document( + name=ref_string, + fields={ + "foo": document_pb2.Value(double_value=1.5), + "bar": document_pb2.Value(string_value=u"skillz"), + }, + create_time=create_time, + update_time=update_time, + ) + response_pb = _make_batch_response(found=document_pb, read_time=read_time) + + reference_map = {ref_string: mock.sentinel.reference} + snapshot = self._call_fut(response_pb, reference_map) + self.assertIsInstance(snapshot, DocumentSnapshot) + self.assertIs(snapshot._reference, mock.sentinel.reference) + self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"}) + self.assertTrue(snapshot._exists) + self.assertEqual(snapshot.read_time, read_time) + self.assertEqual(snapshot.create_time, create_time) + self.assertEqual(snapshot.update_time, update_time) + + def test_missing(self): + ref_string = self._dummy_ref_string() + response_pb = _make_batch_response(missing=ref_string) + + snapshot = self._call_fut(response_pb, {}) + self.assertFalse(snapshot.exists) + + def test_unset_result_type(self): + response_pb = _make_batch_response() + with self.assertRaises(ValueError): + self._call_fut(response_pb, {}) + + def test_unknown_result_type(self): + response_pb = mock.Mock(spec=["WhichOneof"]) + response_pb.WhichOneof.return_value = "zoob_value" + + with self.assertRaises(ValueError): + self._call_fut(response_pb, {}) + + response_pb.WhichOneof.assert_called_once_with("result") + + +class Test__get_doc_mask(unittest.TestCase): + @staticmethod + def _call_fut(field_paths): + from google.cloud.firestore_v1.client import _get_doc_mask + + return 
_get_doc_mask(field_paths) + + def test_none(self): + self.assertIsNone(self._call_fut(None)) + + def test_paths(self): + from google.cloud.firestore_v1.proto import common_pb2 + + field_paths = ["a.b", "c"] + result = self._call_fut(field_paths) + expected = common_pb2.DocumentMask(field_paths=field_paths) + self.assertEqual(result, expected) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_batch_response(**kwargs): + from google.cloud.firestore_v1.proto import firestore_pb2 + + return firestore_pb2.BatchGetDocumentsResponse(**kwargs) + + +def _doc_get_info(ref_string, values): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.firestore_v1 import _helpers + + now = datetime.datetime.utcnow() + read_time = _datetime_to_pb_timestamp(now) + delta = datetime.timedelta(seconds=100) + update_time = _datetime_to_pb_timestamp(now - delta) + create_time = _datetime_to_pb_timestamp(now - 2 * delta) + + document_pb = document_pb2.Document( + name=ref_string, + fields=_helpers.encode_dict(values), + create_time=create_time, + update_time=update_time, + ) + + return document_pb, read_time diff --git a/firestore/tests/unit/v1/test_collection.py b/firestore/tests/unit/v1/test_collection.py new file mode 100644 index 000000000000..213b32e13a85 --- /dev/null +++ b/firestore/tests/unit/v1/test_collection.py @@ -0,0 +1,589 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import types +import unittest + +import mock +import six + + +class TestCollectionReference(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.collection import CollectionReference + + return CollectionReference + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + @staticmethod + def _get_public_methods(klass): + return set( + name + for name, value in six.iteritems(klass.__dict__) + if (not name.startswith("_") and isinstance(value, types.FunctionType)) + ) + + def test_query_method_matching(self): + from google.cloud.firestore_v1.query import Query + + query_methods = self._get_public_methods(Query) + klass = self._get_target_class() + collection_methods = self._get_public_methods(klass) + # Make sure every query method is present on + # ``CollectionReference``. 
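+        # Sets compare by subset: ``assertLessEqual`` passes only if every ``Query`` method also appears on ``CollectionReference``.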
+ self.assertLessEqual(query_methods, collection_methods) + + def test_constructor(self): + collection_id1 = "rooms" + document_id = "roomA" + collection_id2 = "messages" + client = mock.sentinel.client + + collection = self._make_one( + collection_id1, document_id, collection_id2, client=client + ) + self.assertIs(collection._client, client) + expected_path = (collection_id1, document_id, collection_id2) + self.assertEqual(collection._path, expected_path) + + def test_constructor_invalid_path(self): + with self.assertRaises(ValueError): + self._make_one() + with self.assertRaises(ValueError): + self._make_one(99, "doc", "bad-collection-id") + with self.assertRaises(ValueError): + self._make_one("bad-document-ID", None, "sub-collection") + with self.assertRaises(ValueError): + self._make_one("Just", "A-Document") + + def test_constructor_invalid_kwarg(self): + with self.assertRaises(TypeError): + self._make_one("Coh-lek-shun", donut=True) + + def test___eq___other_type(self): + client = mock.sentinel.client + collection = self._make_one("name", client=client) + other = object() + self.assertFalse(collection == other) + + def test___eq___different_path_same_client(self): + client = mock.sentinel.client + collection = self._make_one("name", client=client) + other = self._make_one("other", client=client) + self.assertFalse(collection == other) + + def test___eq___same_path_different_client(self): + client = mock.sentinel.client + other_client = mock.sentinel.other_client + collection = self._make_one("name", client=client) + other = self._make_one("name", client=other_client) + self.assertFalse(collection == other) + + def test___eq___same_path_same_client(self): + client = mock.sentinel.client + collection = self._make_one("name", client=client) + other = self._make_one("name", client=client) + self.assertTrue(collection == other) + + def test_id_property(self): + collection_id = "hi-bob" + collection = self._make_one(collection_id) + self.assertEqual(collection.id, collection_id) + + def test_parent_property(self): + from google.cloud.firestore_v1.document import DocumentReference + + collection_id1 = "grocery-store" + document_id = "market" + collection_id2 = "darth" + client = _make_client() + collection = self._make_one( + collection_id1, document_id, collection_id2, client=client + ) + + parent = collection.parent + self.assertIsInstance(parent, DocumentReference) + self.assertIs(parent._client, client) + self.assertEqual(parent._path, (collection_id1, document_id)) + + def test_parent_property_top_level(self): + collection = self._make_one("tahp-leh-vull") + self.assertIsNone(collection.parent) + + def test_document_factory_explicit_id(self): + from google.cloud.firestore_v1.document import DocumentReference + + collection_id = "grocery-store" + document_id = "market" + client = _make_client() + collection = self._make_one(collection_id, client=client) + + child = collection.document(document_id) + self.assertIsInstance(child, DocumentReference) + self.assertIs(child._client, client) + self.assertEqual(child._path, (collection_id, document_id)) + + @mock.patch( + "google.cloud.firestore_v1.collection._auto_id", + return_value="zorpzorpthreezorp012", + ) + def test_document_factory_auto_id(self, mock_auto_id): + from google.cloud.firestore_v1.document import DocumentReference + + collection_name = "space-town" + client = _make_client() + collection = self._make_one(collection_name, client=client) + + child = collection.document() + self.assertIsInstance(child, DocumentReference) + 
self.assertIs(child._client, client) + self.assertEqual(child._path, (collection_name, mock_auto_id.return_value)) + + mock_auto_id.assert_called_once_with() + + def test__parent_info_top_level(self): + client = _make_client() + collection_id = "soap" + collection = self._make_one(collection_id, client=client) + + parent_path, expected_prefix = collection._parent_info() + + expected_path = "projects/{}/databases/{}/documents".format( + client.project, client._database + ) + self.assertEqual(parent_path, expected_path) + prefix = "{}/{}".format(expected_path, collection_id) + self.assertEqual(expected_prefix, prefix) + + def test__parent_info_nested(self): + collection_id1 = "bar" + document_id = "baz" + collection_id2 = "chunk" + client = _make_client() + collection = self._make_one( + collection_id1, document_id, collection_id2, client=client + ) + + parent_path, expected_prefix = collection._parent_info() + + expected_path = "projects/{}/databases/{}/documents/{}/{}".format( + client.project, client._database, collection_id1, document_id + ) + self.assertEqual(parent_path, expected_path) + prefix = "{}/{}".format(expected_path, collection_id2) + self.assertEqual(expected_prefix, prefix) + + def test_add_auto_assigned(self): + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.document import DocumentReference + from google.cloud.firestore_v1 import SERVER_TIMESTAMP + from google.cloud.firestore_v1._helpers import pbs_for_set_no_merge + + # Create a minimal fake GAPIC and attach it to a real client. + firestore_api = mock.Mock(spec=["create_document", "commit"]) + write_result = mock.Mock( + update_time=mock.sentinel.update_time, spec=["update_time"] + ) + commit_response = mock.Mock( + write_results=[write_result], + spec=["write_results", "commit_time"], + commit_time=mock.sentinel.commit_time, + ) + firestore_api.commit.return_value = commit_response + create_doc_response = document_pb2.Document() + firestore_api.create_document.return_value = create_doc_response + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a collection. + collection = self._make_one("grand-parent", "parent", "child", client=client) + + # Add a dummy response for the fake GAPIC. + parent_path = collection.parent._document_path + auto_assigned_id = "cheezburger" + name = "{}/{}/{}".format(parent_path, collection.id, auto_assigned_id) + create_doc_response = document_pb2.Document(name=name) + create_doc_response.update_time.FromDatetime(datetime.datetime.utcnow()) + firestore_api.create_document.return_value = create_doc_response + + # Actually call add() on our collection; include a transform to make + # sure transforms during adds work. + document_data = {"been": "here", "now": SERVER_TIMESTAMP} + update_time, document_ref = collection.add(document_data) + + # Verify the response and the mocks. 
+ self.assertIs(update_time, mock.sentinel.update_time) + self.assertIsInstance(document_ref, DocumentReference) + self.assertIs(document_ref._client, client) + expected_path = collection._path + (auto_assigned_id,) + self.assertEqual(document_ref._path, expected_path) + + expected_document_pb = document_pb2.Document() + firestore_api.create_document.assert_called_once_with( + parent_path, + collection_id=collection.id, + document_id=None, + document=expected_document_pb, + mask=None, + metadata=client._rpc_metadata, + ) + write_pbs = pbs_for_set_no_merge(document_ref._document_path, document_data) + firestore_api.commit.assert_called_once_with( + client._database_string, + write_pbs, + transaction=None, + metadata=client._rpc_metadata, + ) + + @staticmethod + def _write_pb_for_create(document_path, document_data): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1 import _helpers + + return write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields=_helpers.encode_dict(document_data) + ), + current_document=common_pb2.Precondition(exists=False), + ) + + def test_add_explicit_id(self): + from google.cloud.firestore_v1.document import DocumentReference + + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["commit"]) + write_result = mock.Mock( + update_time=mock.sentinel.update_time, spec=["update_time"] + ) + commit_response = mock.Mock( + write_results=[write_result], + spec=["write_results", "commit_time"], + commit_time=mock.sentinel.commit_time, + ) + firestore_api.commit.return_value = commit_response + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a collection and call add(). + collection = self._make_one("parent", client=client) + document_data = {"zorp": 208.75, "i-did-not": b"know that"} + doc_id = "child" + update_time, document_ref = collection.add(document_data, document_id=doc_id) + + # Verify the response and the mocks. 
+ self.assertIs(update_time, mock.sentinel.update_time) + self.assertIsInstance(document_ref, DocumentReference) + self.assertIs(document_ref._client, client) + self.assertEqual(document_ref._path, (collection.id, doc_id)) + + write_pb = self._write_pb_for_create(document_ref._document_path, document_data) + firestore_api.commit.assert_called_once_with( + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_select(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + field_paths = ["a", "b"] + query = collection.select(field_paths) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + projection_paths = [ + field_ref.field_path for field_ref in query._projection.fields + ] + self.assertEqual(projection_paths, field_paths) + + @staticmethod + def _make_field_filter_pb(field_path, op_string, value): + from google.cloud.firestore_v1.proto import query_pb2 + from google.cloud.firestore_v1 import _helpers + from google.cloud.firestore_v1.query import _enum_from_op_string + + return query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + op=_enum_from_op_string(op_string), + value=_helpers.encode_value(value), + ) + + def test_where(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + field_path = "foo" + op_string = "==" + value = 45 + query = collection.where(field_path, op_string, value) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(len(query._field_filters), 1) + field_filter_pb = query._field_filters[0] + self.assertEqual( + field_filter_pb, self._make_field_filter_pb(field_path, op_string, value) + ) + + @staticmethod + def _make_order_pb(field_path, direction): + from google.cloud.firestore_v1.proto import query_pb2 + from google.cloud.firestore_v1.query import _enum_from_direction + + return query_pb2.StructuredQuery.Order( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + direction=_enum_from_direction(direction), + ) + + def test_order_by(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + field_path = "foo" + direction = Query.DESCENDING + query = collection.order_by(field_path, direction=direction) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(len(query._orders), 1) + order_pb = query._orders[0] + self.assertEqual(order_pb, self._make_order_pb(field_path, direction)) + + def test_limit(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + limit = 15 + query = collection.limit(limit) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(query._limit, limit) + + def test_offset(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + offset = 113 + query = collection.offset(offset) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(query._offset, offset) + + def test_start_at(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + doc_fields = {"a": "b"} + query = collection.start_at(doc_fields) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + 
self.assertEqual(query._start_at, (doc_fields, True)) + + def test_start_after(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + doc_fields = {"d": "foo", "e": 10} + query = collection.start_after(doc_fields) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(query._start_at, (doc_fields, False)) + + def test_end_before(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + doc_fields = {"bar": 10.5} + query = collection.end_before(doc_fields) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(query._end_at, (doc_fields, True)) + + def test_end_at(self): + from google.cloud.firestore_v1.query import Query + + collection = self._make_one("collection") + doc_fields = {"opportunity": True, "reason": 9} + query = collection.end_at(doc_fields) + + self.assertIsInstance(query, Query) + self.assertIs(query._parent, collection) + self.assertEqual(query._end_at, (doc_fields, False)) + + def _list_documents_helper(self, page_size=None): + from google.api_core.page_iterator import Iterator + from google.api_core.page_iterator import Page + from google.cloud.firestore_v1.document import DocumentReference + from google.cloud.firestore_v1.gapic.firestore_client import FirestoreClient + from google.cloud.firestore_v1.proto.document_pb2 import Document + + class _Iterator(Iterator): + def __init__(self, pages): + super(_Iterator, self).__init__(client=None) + self._pages = pages + + def _next_page(self): + if self._pages: + page, self._pages = self._pages[0], self._pages[1:] + return Page(self, page, self.item_to_value) + + client = _make_client() + template = client._database_string + "/documents/{}" + document_ids = ["doc-1", "doc-2"] + documents = [ + Document(name=template.format(document_id)) for document_id in document_ids + ] + iterator = _Iterator(pages=[documents]) + api_client = mock.create_autospec(FirestoreClient) + api_client.list_documents.return_value = iterator + client._firestore_api_internal = api_client + collection = self._make_one("collection", client=client) + + if page_size is not None: + documents = list(collection.list_documents(page_size=page_size)) + else: + documents = list(collection.list_documents()) + + # Verify the response and the mocks. 
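# Stand-alone version of the paging fake defined above (_Iterator): a
# google.api_core Iterator subclass that serves canned pages, which is how
# list_documents() responses are simulated here without any RPCs.
from google.api_core.page_iterator import Iterator, Page

class _CannedIterator(Iterator):
    def __init__(self, pages):
        super(_CannedIterator, self).__init__(client=None)
        self._canned = list(pages)

    def _next_page(self):
        if self._canned:
            page, self._canned = self._canned[0], self._canned[1:]
            return Page(self, page, self.item_to_value)

assert list(_CannedIterator(pages=[["doc-1", "doc-2"]])) == ["doc-1", "doc-2"]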
+ self.assertEqual(len(documents), len(document_ids)) + for document, document_id in zip(documents, document_ids): + self.assertIsInstance(document, DocumentReference) + self.assertEqual(document.parent, collection) + self.assertEqual(document.id, document_id) + + parent, _ = collection._parent_info() + api_client.list_documents.assert_called_once_with( + parent, + collection.id, + page_size=page_size, + show_missing=True, + metadata=client._rpc_metadata, + ) + + def test_list_documents_wo_page_size(self): + self._list_documents_helper() + + def test_list_documents_w_page_size(self): + self._list_documents_helper(page_size=25) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) + def test_get(self, query_class): + import warnings + + collection = self._make_one("collection") + with warnings.catch_warnings(record=True) as warned: + get_response = collection.get() + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + self.assertIs(get_response, query_instance.stream.return_value) + query_instance.stream.assert_called_once_with(transaction=None) + + # Verify the deprecation + self.assertEqual(len(warned), 1) + self.assertIs(warned[0].category, DeprecationWarning) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) + def test_get_with_transaction(self, query_class): + import warnings + + collection = self._make_one("collection") + transaction = mock.sentinel.txn + with warnings.catch_warnings(record=True) as warned: + get_response = collection.get(transaction=transaction) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + self.assertIs(get_response, query_instance.stream.return_value) + query_instance.stream.assert_called_once_with(transaction=transaction) + + # Verify the deprecation + self.assertEqual(len(warned), 1) + self.assertIs(warned[0].category, DeprecationWarning) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) + def test_stream(self, query_class): + collection = self._make_one("collection") + stream_response = collection.stream() + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + self.assertIs(stream_response, query_instance.stream.return_value) + query_instance.stream.assert_called_once_with(transaction=None) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) + def test_stream_with_transaction(self, query_class): + collection = self._make_one("collection") + transaction = mock.sentinel.txn + stream_response = collection.stream(transaction=transaction) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + self.assertIs(stream_response, query_instance.stream.return_value) + query_instance.stream.assert_called_once_with(transaction=transaction) + + @mock.patch("google.cloud.firestore_v1.collection.Watch", autospec=True) + def test_on_snapshot(self, watch): + collection = self._make_one("collection") + collection.on_snapshot(None) + watch.for_query.assert_called_once() + + +class Test__auto_id(unittest.TestCase): + @staticmethod + def _call_fut(): + from google.cloud.firestore_v1.collection import _auto_id + + return _auto_id() + + @mock.patch("random.choice") + def test_it(self, mock_rand_choice): + from google.cloud.firestore_v1.collection import _AUTO_ID_CHARS + + mock_result = "0123456789abcdefghij" + mock_rand_choice.side_effect = list(mock_result) + result = self._call_fut() + self.assertEqual(result, mock_result) + + 
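# Stand-alone illustration (stdlib `mock` only) of the side_effect scripting
# used in this test: each call to the patched random.choice returns the next
# item from the supplied sequence.
import random
import mock

with mock.patch("random.choice", side_effect=list("abc")) as fake_choice:
    picked = "".join(random.choice("xyz") for _ in range(3))

assert picked == "abc"
assert fake_choice.call_count == 3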
mock_calls = [mock.call(_AUTO_ID_CHARS)] * 20 + self.assertEqual(mock_rand_choice.mock_calls, mock_calls) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project="project-project", credentials=credentials) diff --git a/firestore/tests/unit/v1/test_cross_language.py b/firestore/tests/unit/v1/test_cross_language.py new file mode 100644 index 000000000000..89810f201fbb --- /dev/null +++ b/firestore/tests/unit/v1/test_cross_language.py @@ -0,0 +1,495 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +import glob +import json +import os + +import mock +import pytest + +from google.protobuf import text_format +from google.cloud.firestore_v1.proto import document_pb2 +from google.cloud.firestore_v1.proto import firestore_pb2 +from google.cloud.firestore_v1.proto import test_v1_pb2 +from google.cloud.firestore_v1.proto import write_pb2 + + +def _load_testproto(filename): + with open(filename, "r") as tp_file: + tp_text = tp_file.read() + test_proto = test_v1_pb2.Test() + text_format.Merge(tp_text, test_proto) + shortname = os.path.split(filename)[-1] + test_proto.description = test_proto.description + " (%s)" % shortname + return test_proto + + +_here = os.path.dirname(__file__) +_glob_expr = "{}/testdata/*.textproto".format(_here) +_globs = glob.glob(_glob_expr) +ALL_TESTPROTOS = [_load_testproto(filename) for filename in sorted(_globs)] + +_CREATE_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "create" +] + +_GET_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "get" +] + +_SET_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "set" +] + +_UPDATE_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "update" +] + +_UPDATE_PATHS_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "update_paths" +] + +_DELETE_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "delete" +] + +_LISTEN_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "listen" +] + +_QUERY_TESTPROTOS = [ + test_proto + for test_proto in ALL_TESTPROTOS + if test_proto.WhichOneof("test") == "query" +] + + +def _mock_firestore_api(): + firestore_api = mock.Mock(spec=["commit"]) + commit_response = firestore_pb2.CommitResponse( + write_results=[write_pb2.WriteResult()] + ) + firestore_api.commit.return_value = commit_response + return firestore_api + + +def _make_client_document(firestore_api, testcase): + from google.cloud.firestore_v1 import Client + from google.cloud.firestore_v1.client import DEFAULT_DATABASE + 
import google.auth.credentials + + _, project, _, database, _, doc_path = testcase.doc_ref_path.split("/", 5) + assert database == DEFAULT_DATABASE + + # Attach the fake GAPIC to a real client. + credentials = mock.Mock(spec=google.auth.credentials.Credentials) + client = Client(project=project, credentials=credentials) + client._firestore_api_internal = firestore_api + return client, client.document(doc_path) + + +def _run_testcase(testcase, call, firestore_api, client): + if getattr(testcase, "is_error", False): + # TODO: is there a subclass of Exception we can check for? + with pytest.raises(Exception): + call() + else: + call() + firestore_api.commit.assert_called_once_with( + client._database_string, + list(testcase.request.writes), + transaction=None, + metadata=client._rpc_metadata, + ) + + +@pytest.mark.parametrize("test_proto", _CREATE_TESTPROTOS) +def test_create_testprotos(test_proto): + testcase = test_proto.create + firestore_api = _mock_firestore_api() + client, document = _make_client_document(firestore_api, testcase) + data = convert_data(json.loads(testcase.json_data)) + call = functools.partial(document.create, data) + _run_testcase(testcase, call, firestore_api, client) + + +@pytest.mark.parametrize("test_proto", _GET_TESTPROTOS) +def test_get_testprotos(test_proto): + testcase = test_proto.get + firestore_api = mock.Mock(spec=["get_document"]) + response = document_pb2.Document() + firestore_api.get_document.return_value = response + client, document = _make_client_document(firestore_api, testcase) + + document.get() # No '.textprotos' for errors, field_paths. + + firestore_api.get_document.assert_called_once_with( + document._document_path, + mask=None, + transaction=None, + metadata=client._rpc_metadata, + ) + + +@pytest.mark.parametrize("test_proto", _SET_TESTPROTOS) +def test_set_testprotos(test_proto): + testcase = test_proto.set + firestore_api = _mock_firestore_api() + client, document = _make_client_document(firestore_api, testcase) + data = convert_data(json.loads(testcase.json_data)) + if testcase.HasField("option"): + merge = convert_set_option(testcase.option) + else: + merge = False + call = functools.partial(document.set, data, merge=merge) + _run_testcase(testcase, call, firestore_api, client) + + +@pytest.mark.parametrize("test_proto", _UPDATE_TESTPROTOS) +def test_update_testprotos(test_proto): + testcase = test_proto.update + firestore_api = _mock_firestore_api() + client, document = _make_client_document(firestore_api, testcase) + data = convert_data(json.loads(testcase.json_data)) + if testcase.HasField("precondition"): + option = convert_precondition(testcase.precondition) + else: + option = None + call = functools.partial(document.update, data, option) + _run_testcase(testcase, call, firestore_api, client) + + +@pytest.mark.skip(reason="Python has no way to call update with a list of field paths.") +@pytest.mark.parametrize("test_proto", _UPDATE_PATHS_TESTPROTOS) +def test_update_paths_testprotos(test_proto): # pragma: NO COVER + pass + + +@pytest.mark.parametrize("test_proto", _DELETE_TESTPROTOS) +def test_delete_testprotos(test_proto): + testcase = test_proto.delete + firestore_api = _mock_firestore_api() + client, document = _make_client_document(firestore_api, testcase) + if testcase.HasField("precondition"): + option = convert_precondition(testcase.precondition) + else: + option = None + call = functools.partial(document.delete, option) + _run_testcase(testcase, call, firestore_api, client) + + +@pytest.mark.parametrize("test_proto", 
_LISTEN_TESTPROTOS)
+def test_listen_testprotos(test_proto):  # pragma: NO COVER
+    # test_proto.listen has 'responses' messages,
+    # 'google.firestore_v1.ListenResponse'
+    # and then an expected list of 'snapshots' (local 'Snapshot'), containing
+    # 'docs' (list of 'google.firestore_v1.Document'),
+    # 'changes' (list of local 'DocChange'), and 'read_time' timestamp.
+    from google.cloud.firestore_v1 import Client
+    from google.cloud.firestore_v1 import DocumentReference
+    from google.cloud.firestore_v1 import DocumentSnapshot
+    from google.cloud.firestore_v1 import Watch
+    import google.auth.credentials
+
+    testcase = test_proto.listen
+    testname = test_proto.description
+
+    credentials = mock.Mock(spec=google.auth.credentials.Credentials)
+    client = Client(project="project", credentials=credentials)
+    modulename = "google.cloud.firestore_v1.watch"
+    with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc):
+        with mock.patch(
+            "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer
+        ):
+            with mock.patch(  # conformance data sets WATCH_TARGET_ID to 1
+                "%s.WATCH_TARGET_ID" % modulename, 1
+            ):
+                snapshots = []
+
+                def callback(keys, applied_changes, read_time):
+                    snapshots.append((keys, applied_changes, read_time))
+
+                query = DummyQuery(client=client)
+                watch = Watch.for_query(
+                    query, callback, DocumentSnapshot, DocumentReference
+                )
+                # conformance data has db string as this
+                db_str = "projects/projectID/databases/(default)"
+                watch._firestore._database_string_internal = db_str
+
+                if testcase.is_error:
+                    try:
+                        for proto in testcase.responses:
+                            watch.on_snapshot(proto)
+                    except RuntimeError:
+                        # listen-target-add-wrong-id.textproto
+                        # listen-target-remove.textproto
+                        pass
+
+                else:
+                    for proto in testcase.responses:
+                        watch.on_snapshot(proto)
+
+                    assert len(snapshots) == len(testcase.snapshots)
+                    for i, (expected_snapshot, actual_snapshot) in enumerate(
+                        zip(testcase.snapshots, snapshots)
+                    ):
+                        expected_changes = expected_snapshot.changes
+                        actual_changes = actual_snapshot[1]
+                        if len(expected_changes) != len(actual_changes):
+                            raise AssertionError(
+                                "change length mismatch in %s (snapshot #%s)"
+                                % (testname, i)
+                            )
+                        for y, (expected_change, actual_change) in enumerate(
+                            zip(expected_changes, actual_changes)
+                        ):
+                            expected_change_kind = expected_change.kind
+                            actual_change_kind = actual_change.type.value
+                            if expected_change_kind != actual_change_kind:
+                                raise AssertionError(
+                                    "change type mismatch in %s (snapshot #%s, change #%s)"
+                                    % (testname, i, y)
+                                )
+
+
+@pytest.mark.parametrize("test_proto", _QUERY_TESTPROTOS)
+def test_query_testprotos(test_proto):  # pragma: NO COVER
+    testcase = test_proto.query
+    if testcase.is_error:
+        with pytest.raises(Exception):
+            query = parse_query(testcase)
+            query._to_protobuf()
+    else:
+        query = parse_query(testcase)
+        found = query._to_protobuf()
+        assert found == testcase.query
+
+
+def convert_data(v):
+    # Replace the strings 'ServerTimestamp' and 'Delete' with the corresponding
+    # sentinels.
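+    # For example (sentinel and transform names come from this package):
+    #   "ServerTimestamp"      -> SERVER_TIMESTAMP
+    #   "Delete"               -> DELETE_FIELD
+    #   ["ArrayUnion", 1, 2]   -> ArrayUnion([1, 2])
+    #   {"a": "Delete"}        -> {"a": DELETE_FIELD}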
+ from google.cloud.firestore_v1 import ArrayRemove + from google.cloud.firestore_v1 import ArrayUnion + from google.cloud.firestore_v1 import DELETE_FIELD + from google.cloud.firestore_v1 import SERVER_TIMESTAMP + + if v == "ServerTimestamp": + return SERVER_TIMESTAMP + elif v == "Delete": + return DELETE_FIELD + elif isinstance(v, list): + if v[0] == "ArrayRemove": + return ArrayRemove([convert_data(e) for e in v[1:]]) + if v[0] == "ArrayUnion": + return ArrayUnion([convert_data(e) for e in v[1:]]) + return [convert_data(e) for e in v] + elif isinstance(v, dict): + return {k: convert_data(v2) for k, v2 in v.items()} + elif v == "NaN": + return float(v) + else: + return v + + +def convert_set_option(option): + from google.cloud.firestore_v1 import _helpers + + if option.fields: + return [ + _helpers.FieldPath(*field.field).to_api_repr() for field in option.fields + ] + + assert option.all + return True + + +def convert_precondition(precond): + from google.cloud.firestore_v1 import Client + + if precond.HasField("exists"): + return Client.write_option(exists=precond.exists) + + assert precond.HasField("update_time") + return Client.write_option(last_update_time=precond.update_time) + + +class DummyRpc(object): # pragma: NO COVER + def __init__(self, listen, initial_request, should_recover): + self.listen = listen + self.initial_request = initial_request + self.should_recover = should_recover + self.closed = False + self.callbacks = [] + + def add_done_callback(self, callback): + self.callbacks.append(callback) + + def close(self): + self.closed = True + + +class DummyBackgroundConsumer(object): # pragma: NO COVER + started = False + stopped = False + is_active = True + + def __init__(self, rpc, on_snapshot): + self._rpc = rpc + self.on_snapshot = on_snapshot + + def start(self): + self.started = True + + def stop(self): + self.stopped = True + self.is_active = False + + +class DummyQuery(object): # pragma: NO COVER + def __init__(self, **kw): + self._client = kw["client"] + self._comparator = lambda x, y: 1 + + def _to_protobuf(self): + from google.cloud.firestore_v1.proto import query_pb2 + + query_kwargs = { + "select": None, + "from": None, + "where": None, + "order_by": None, + "start_at": None, + "end_at": None, + } + return query_pb2.StructuredQuery(**query_kwargs) + + +def parse_query(testcase): + # 'query' testcase contains: + # - 'coll_path': collection ref path. + # - 'clauses': array of one or more 'Clause' elements + # - 'query': the actual google.firestore_v1.StructuredQuery message + # to be constructed. + # - 'is_error' (as other testcases). 
+ # + # 'Clause' elements are unions of: + # - 'select': [field paths] + # - 'where': (field_path, op, json_value) + # - 'order_by': (field_path, direction) + # - 'offset': int + # - 'limit': int + # - 'start_at': 'Cursor' + # - 'start_after': 'Cursor' + # - 'end_at': 'Cursor' + # - 'end_before': 'Cursor' + # + # 'Cursor' contains either: + # - 'doc_snapshot': 'DocSnapshot' + # - 'json_values': [string] + # + # 'DocSnapshot' contains: + # 'path': str + # 'json_data': str + from google.auth.credentials import Credentials + from google.cloud.firestore_v1 import Client + from google.cloud.firestore_v1 import Query + + _directions = {"asc": Query.ASCENDING, "desc": Query.DESCENDING} + + credentials = mock.create_autospec(Credentials) + client = Client("projectID", credentials) + path = parse_path(testcase.coll_path) + collection = client.collection(*path) + query = collection + + for clause in testcase.clauses: + kind = clause.WhichOneof("clause") + + if kind == "select": + field_paths = [ + ".".join(field_path.field) for field_path in clause.select.fields + ] + query = query.select(field_paths) + elif kind == "where": + path = ".".join(clause.where.path.field) + value = convert_data(json.loads(clause.where.json_value)) + query = query.where(path, clause.where.op, value) + elif kind == "order_by": + path = ".".join(clause.order_by.path.field) + direction = clause.order_by.direction + direction = _directions.get(direction, direction) + query = query.order_by(path, direction=direction) + elif kind == "offset": + query = query.offset(clause.offset) + elif kind == "limit": + query = query.limit(clause.limit) + elif kind == "start_at": + cursor = parse_cursor(clause.start_at, client) + query = query.start_at(cursor) + elif kind == "start_after": + cursor = parse_cursor(clause.start_after, client) + query = query.start_after(cursor) + elif kind == "end_at": + cursor = parse_cursor(clause.end_at, client) + query = query.end_at(cursor) + elif kind == "end_before": + cursor = parse_cursor(clause.end_before, client) + query = query.end_before(cursor) + else: # pragma: NO COVER + raise ValueError("Unknown query clause: {}".format(kind)) + + return query + + +def parse_path(path): + _, relative = path.split("documents/") + return relative.split("/") + + +def parse_cursor(cursor, client): + from google.cloud.firestore_v1 import DocumentReference + from google.cloud.firestore_v1 import DocumentSnapshot + + if cursor.HasField("doc_snapshot"): + path = parse_path(cursor.doc_snapshot.path) + doc_ref = DocumentReference(*path, client=client) + + return DocumentSnapshot( + reference=doc_ref, + data=json.loads(cursor.doc_snapshot.json_data), + exists=True, + read_time=None, + create_time=None, + update_time=None, + ) + + values = [json.loads(value) for value in cursor.json_values] + return convert_data(values) diff --git a/firestore/tests/unit/v1/test_document.py b/firestore/tests/unit/v1/test_document.py new file mode 100644 index 000000000000..89a19df674dd --- /dev/null +++ b/firestore/tests/unit/v1/test_document.py @@ -0,0 +1,825 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import unittest + +import mock + + +class TestDocumentReference(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.document import DocumentReference + + return DocumentReference + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + collection_id1 = "users" + document_id1 = "alovelace" + collection_id2 = "platform" + document_id2 = "*nix" + client = mock.MagicMock() + client.__hash__.return_value = 1234 + + document = self._make_one( + collection_id1, document_id1, collection_id2, document_id2, client=client + ) + self.assertIs(document._client, client) + expected_path = "/".join( + (collection_id1, document_id1, collection_id2, document_id2) + ) + self.assertEqual(document.path, expected_path) + + def test_constructor_invalid_path(self): + with self.assertRaises(ValueError): + self._make_one() + with self.assertRaises(ValueError): + self._make_one(None, "before", "bad-collection-id", "fifteen") + with self.assertRaises(ValueError): + self._make_one("bad-document-ID", None) + with self.assertRaises(ValueError): + self._make_one("Just", "A-Collection", "Sub") + + def test_constructor_invalid_kwarg(self): + with self.assertRaises(TypeError): + self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75) + + def test___copy__(self): + client = _make_client("rain") + document = self._make_one("a", "b", client=client) + # Access the document path so it is copied. + doc_path = document._document_path + self.assertEqual(doc_path, document._document_path_internal) + + new_document = document.__copy__() + self.assertIsNot(new_document, document) + self.assertIs(new_document._client, document._client) + self.assertEqual(new_document._path, document._path) + self.assertEqual( + new_document._document_path_internal, document._document_path_internal + ) + + def test___deepcopy__calls_copy(self): + client = mock.sentinel.client + document = self._make_one("a", "b", client=client) + document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[]) + + unused_memo = {} + new_document = document.__deepcopy__(unused_memo) + self.assertIs(new_document, mock.sentinel.new_doc) + document.__copy__.assert_called_once_with() + + def test__eq__same_type(self): + document1 = self._make_one("X", "YY", client=mock.sentinel.client) + document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) + document3 = self._make_one("X", "YY", client=mock.sentinel.client2) + document4 = self._make_one("X", "YY", client=mock.sentinel.client) + + pairs = ((document1, document2), (document1, document3), (document2, document3)) + for candidate1, candidate2 in pairs: + # We use == explicitly since assertNotEqual would use !=. + equality_val = candidate1 == candidate2 + self.assertFalse(equality_val) + + # Check the only equal one. 
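# A condensed, stand-alone restatement of the equality contract these tests
# pin down: two references compare equal only when both the path and the
# owning client match (stdlib `mock` plus this patch's DocumentReference).
import mock
from google.cloud.firestore_v1.document import DocumentReference

doc_a = DocumentReference("X", "YY", client=mock.sentinel.client)
doc_b = DocumentReference("X", "YY", client=mock.sentinel.client)
doc_c = DocumentReference("X", "YY", client=mock.sentinel.client2)

assert doc_a == doc_b and doc_a is not doc_b
assert doc_a != doc_c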
+ self.assertEqual(document1, document4) + self.assertIsNot(document1, document4) + + def test__eq__other_type(self): + document = self._make_one("X", "YY", client=mock.sentinel.client) + other = object() + equality_val = document == other + self.assertFalse(equality_val) + self.assertIs(document.__eq__(other), NotImplemented) + + def test___hash__(self): + client = mock.MagicMock() + client.__hash__.return_value = 234566789 + document = self._make_one("X", "YY", client=client) + self.assertEqual(hash(document), hash(("X", "YY")) + hash(client)) + + def test__ne__same_type(self): + document1 = self._make_one("X", "YY", client=mock.sentinel.client) + document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) + document3 = self._make_one("X", "YY", client=mock.sentinel.client2) + document4 = self._make_one("X", "YY", client=mock.sentinel.client) + + self.assertNotEqual(document1, document2) + self.assertNotEqual(document1, document3) + self.assertNotEqual(document2, document3) + + # We use != explicitly since assertEqual would use ==. + inequality_val = document1 != document4 + self.assertFalse(inequality_val) + self.assertIsNot(document1, document4) + + def test__ne__other_type(self): + document = self._make_one("X", "YY", client=mock.sentinel.client) + other = object() + self.assertNotEqual(document, other) + self.assertIs(document.__ne__(other), NotImplemented) + + def test__document_path_property(self): + project = "hi-its-me-ok-bye" + client = _make_client(project=project) + + collection_id = "then" + document_id = "090909iii" + document = self._make_one(collection_id, document_id, client=client) + doc_path = document._document_path + expected = "projects/{}/databases/{}/documents/{}/{}".format( + project, client._database, collection_id, document_id + ) + self.assertEqual(doc_path, expected) + self.assertIs(document._document_path_internal, doc_path) + + # Make sure value is cached. 
+ document._document_path_internal = mock.sentinel.cached + self.assertIs(document._document_path, mock.sentinel.cached) + + def test__document_path_property_no_client(self): + document = self._make_one("hi", "bye") + self.assertIsNone(document._client) + with self.assertRaises(ValueError): + getattr(document, "_document_path") + + self.assertIsNone(document._document_path_internal) + + def test_id_property(self): + document_id = "867-5309" + document = self._make_one("Co-lek-shun", document_id) + self.assertEqual(document.id, document_id) + + def test_parent_property(self): + from google.cloud.firestore_v1.collection import CollectionReference + + collection_id = "grocery-store" + document_id = "market" + client = _make_client() + document = self._make_one(collection_id, document_id, client=client) + + parent = document.parent + self.assertIsInstance(parent, CollectionReference) + self.assertIs(parent._client, client) + self.assertEqual(parent._path, (collection_id,)) + + def test_collection_factory(self): + from google.cloud.firestore_v1.collection import CollectionReference + + collection_id = "grocery-store" + document_id = "market" + new_collection = "fruits" + client = _make_client() + document = self._make_one(collection_id, document_id, client=client) + + child = document.collection(new_collection) + self.assertIsInstance(child, CollectionReference) + self.assertIs(child._client, client) + self.assertEqual(child._path, (collection_id, document_id, new_collection)) + + @staticmethod + def _write_pb_for_create(document_path, document_data): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1 import _helpers + + return write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields=_helpers.encode_dict(document_data) + ), + current_document=common_pb2.Precondition(exists=False), + ) + + @staticmethod + def _make_commit_repsonse(write_results=None): + from google.cloud.firestore_v1.proto import firestore_pb2 + + response = mock.create_autospec(firestore_pb2.CommitResponse) + response.write_results = write_results or [mock.sentinel.write_result] + response.commit_time = mock.sentinel.commit_time + return response + + def test_create(self): + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["commit"]) + firestore_api.commit.return_value = self._make_commit_repsonse() + + # Attach the fake GAPIC to a real client. + client = _make_client("dignity") + client._firestore_api_internal = firestore_api + + # Actually make a document and call create(). + document = self._make_one("foo", "twelve", client=client) + document_data = {"hello": "goodbye", "count": 99} + write_result = document.create(document_data) + + # Verify the response and the mocks. + self.assertIs(write_result, mock.sentinel.write_result) + write_pb = self._write_pb_for_create(document._document_path, document_data) + firestore_api.commit.assert_called_once_with( + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_create_empty(self): + # Create a minimal fake GAPIC with a dummy response. 
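# Condensed sketch of the "attach a fake GAPIC to a real client" pattern used
# throughout this module, shown end to end for create(); the project name and
# document data are arbitrary, everything else comes from this patch.
import mock
import google.auth.credentials
from google.cloud.firestore_v1.client import Client

credentials = mock.Mock(spec=google.auth.credentials.Credentials)
client = Client(project="dignity", credentials=credentials)

firestore_api = mock.Mock(spec=["commit"])
firestore_api.commit.return_value = mock.Mock(
    write_results=[mock.sentinel.write_result],
    commit_time=mock.sentinel.commit_time,
    spec=["write_results", "commit_time"],
)
client._firestore_api_internal = firestore_api

write_result = client.document("foo", "twelve").create({"hello": "goodbye"})
assert write_result is mock.sentinel.write_result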
+ from google.cloud.firestore_v1.document import DocumentReference + from google.cloud.firestore_v1.document import DocumentSnapshot + + firestore_api = mock.Mock(spec=["commit"]) + document_reference = mock.create_autospec(DocumentReference) + snapshot = mock.create_autospec(DocumentSnapshot) + snapshot.exists = True + document_reference.get.return_value = snapshot + firestore_api.commit.return_value = self._make_commit_repsonse( + write_results=[document_reference] + ) + + # Attach the fake GAPIC to a real client. + client = _make_client("dignity") + client._firestore_api_internal = firestore_api + client.get_all = mock.MagicMock() + client.get_all.exists.return_value = True + + # Actually make a document and call create(). + document = self._make_one("foo", "twelve", client=client) + document_data = {} + write_result = document.create(document_data) + self.assertTrue(write_result.get().exists) + + @staticmethod + def _write_pb_for_set(document_path, document_data, merge): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1 import _helpers + + write_pbs = write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields=_helpers.encode_dict(document_data) + ) + ) + if merge: + field_paths = [ + field_path + for field_path, value in _helpers.extract_fields( + document_data, _helpers.FieldPath() + ) + ] + field_paths = [ + field_path.to_api_repr() for field_path in sorted(field_paths) + ] + mask = common_pb2.DocumentMask(field_paths=sorted(field_paths)) + write_pbs.update_mask.CopyFrom(mask) + return write_pbs + + def _set_helper(self, merge=False, **option_kwargs): + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["commit"]) + firestore_api.commit.return_value = self._make_commit_repsonse() + + # Attach the fake GAPIC to a real client. + client = _make_client("db-dee-bee") + client._firestore_api_internal = firestore_api + + # Actually make a document and call create(). + document = self._make_one("User", "Interface", client=client) + document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"} + write_result = document.set(document_data, merge) + + # Verify the response and the mocks. + self.assertIs(write_result, mock.sentinel.write_result) + write_pb = self._write_pb_for_set(document._document_path, document_data, merge) + + firestore_api.commit.assert_called_once_with( + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_set(self): + self._set_helper() + + def test_set_merge(self): + self._set_helper(merge=True) + + @staticmethod + def _write_pb_for_update(document_path, update_values, field_paths): + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1 import _helpers + + return write_pb2.Write( + update=document_pb2.Document( + name=document_path, fields=_helpers.encode_dict(update_values) + ), + update_mask=common_pb2.DocumentMask(field_paths=field_paths), + current_document=common_pb2.Precondition(exists=True), + ) + + def _update_helper(self, **option_kwargs): + from google.cloud.firestore_v1.transforms import DELETE_FIELD + + # Create a minimal fake GAPIC with a dummy response. 
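# Sketch of the update_mask a merge=True set() is expected to carry, mirroring
# _write_pb_for_set above: every leaf field path in the data, sorted, rendered
# via to_api_repr().
from google.cloud.firestore_v1 import _helpers
from google.cloud.firestore_v1.proto import common_pb2

document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"}
field_paths = sorted(
    path for path, _ in _helpers.extract_fields(document_data, _helpers.FieldPath())
)
mask = common_pb2.DocumentMask(
    field_paths=[path.to_api_repr() for path in field_paths]
)
assert mask.field_paths == ["And", "Now"]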
+ firestore_api = mock.Mock(spec=["commit"]) + firestore_api.commit.return_value = self._make_commit_repsonse() + + # Attach the fake GAPIC to a real client. + client = _make_client("potato-chip") + client._firestore_api_internal = firestore_api + + # Actually make a document and call create(). + document = self._make_one("baked", "Alaska", client=client) + # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. + field_updates = collections.OrderedDict( + (("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD)) + ) + if option_kwargs: + option = client.write_option(**option_kwargs) + write_result = document.update(field_updates, option=option) + else: + option = None + write_result = document.update(field_updates) + + # Verify the response and the mocks. + self.assertIs(write_result, mock.sentinel.write_result) + update_values = { + "hello": field_updates["hello"], + "then": {"do": field_updates["then.do"]}, + } + field_paths = list(field_updates.keys()) + write_pb = self._write_pb_for_update( + document._document_path, update_values, sorted(field_paths) + ) + if option is not None: + option.modify_write(write_pb) + firestore_api.commit.assert_called_once_with( + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_update_with_exists(self): + with self.assertRaises(ValueError): + self._update_helper(exists=True) + + def test_update(self): + self._update_helper() + + def test_update_with_precondition(self): + from google.protobuf import timestamp_pb2 + + timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) + self._update_helper(last_update_time=timestamp) + + def test_empty_update(self): + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["commit"]) + firestore_api.commit.return_value = self._make_commit_repsonse() + + # Attach the fake GAPIC to a real client. + client = _make_client("potato-chip") + client._firestore_api_internal = firestore_api + + # Actually make a document and call create(). + document = self._make_one("baked", "Alaska", client=client) + # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. + field_updates = {} + with self.assertRaises(ValueError): + document.update(field_updates) + + def _delete_helper(self, **option_kwargs): + from google.cloud.firestore_v1.proto import write_pb2 + + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["commit"]) + firestore_api.commit.return_value = self._make_commit_repsonse() + + # Attach the fake GAPIC to a real client. + client = _make_client("donut-base") + client._firestore_api_internal = firestore_api + + # Actually make a document and call delete(). + document = self._make_one("where", "we-are", client=client) + if option_kwargs: + option = client.write_option(**option_kwargs) + delete_time = document.delete(option=option) + else: + option = None + delete_time = document.delete() + + # Verify the response and the mocks. 
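# Sketch of the Write a delete() issues, plus the last-update-time precondition
# exercised by test_delete_with_option below; the document path is made up and
# Client.write_option is called unbound, as convert_precondition does in
# test_cross_language.py.
from google.protobuf import timestamp_pb2
from google.cloud.firestore_v1.client import Client
from google.cloud.firestore_v1.proto import write_pb2

write_pb = write_pb2.Write(
    delete="projects/p/databases/(default)/documents/where/we-are"
)
timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244)
option = Client.write_option(last_update_time=timestamp)
option.modify_write(write_pb)  # folds the precondition into the Write message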
+ self.assertIs(delete_time, mock.sentinel.commit_time) + write_pb = write_pb2.Write(delete=document._document_path) + if option is not None: + option.modify_write(write_pb) + firestore_api.commit.assert_called_once_with( + client._database_string, + [write_pb], + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_delete(self): + self._delete_helper() + + def test_delete_with_option(self): + from google.protobuf import timestamp_pb2 + + timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) + self._delete_helper(last_update_time=timestamp_pb) + + def _get_helper(self, field_paths=None, use_transaction=False, not_found=False): + from google.api_core.exceptions import NotFound + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.transaction import Transaction + + # Create a minimal fake GAPIC with a dummy response. + create_time = 123 + update_time = 234 + firestore_api = mock.Mock(spec=["get_document"]) + response = mock.create_autospec(document_pb2.Document) + response.fields = {} + response.create_time = create_time + response.update_time = update_time + + if not_found: + firestore_api.get_document.side_effect = NotFound("testing") + else: + firestore_api.get_document.return_value = response + + client = _make_client("donut-base") + client._firestore_api_internal = firestore_api + + document = self._make_one("where", "we-are", client=client) + + if use_transaction: + transaction = Transaction(client) + transaction_id = transaction._id = b"asking-me-2" + else: + transaction = None + + snapshot = document.get(field_paths=field_paths, transaction=transaction) + + self.assertIs(snapshot.reference, document) + if not_found: + self.assertIsNone(snapshot._data) + self.assertFalse(snapshot.exists) + self.assertIsNone(snapshot.read_time) + self.assertIsNone(snapshot.create_time) + self.assertIsNone(snapshot.update_time) + else: + self.assertEqual(snapshot.to_dict(), {}) + self.assertTrue(snapshot.exists) + self.assertIsNone(snapshot.read_time) + self.assertIs(snapshot.create_time, create_time) + self.assertIs(snapshot.update_time, update_time) + + # Verify the request made to the API + if field_paths is not None: + mask = common_pb2.DocumentMask(field_paths=sorted(field_paths)) + else: + mask = None + + if use_transaction: + expected_transaction_id = transaction_id + else: + expected_transaction_id = None + + firestore_api.get_document.assert_called_once_with( + document._document_path, + mask=mask, + transaction=expected_transaction_id, + metadata=client._rpc_metadata, + ) + + def test_get_not_found(self): + self._get_helper(not_found=True) + + def test_get_default(self): + self._get_helper() + + def test_get_w_string_field_path(self): + with self.assertRaises(ValueError): + self._get_helper(field_paths="foo") + + def test_get_with_field_path(self): + self._get_helper(field_paths=["foo"]) + + def test_get_with_multiple_field_paths(self): + self._get_helper(field_paths=["foo", "bar.baz"]) + + def test_get_with_transaction(self): + self._get_helper(use_transaction=True) + + def _collections_helper(self, page_size=None): + from google.api_core.page_iterator import Iterator + from google.api_core.page_iterator import Page + from google.cloud.firestore_v1.collection import CollectionReference + from google.cloud.firestore_v1.gapic.firestore_client import FirestoreClient + + class _Iterator(Iterator): + def __init__(self, pages): + super(_Iterator, 
self).__init__(client=None) + self._pages = pages + + def _next_page(self): + if self._pages: + page, self._pages = self._pages[0], self._pages[1:] + return Page(self, page, self.item_to_value) + + collection_ids = ["coll-1", "coll-2"] + iterator = _Iterator(pages=[collection_ids]) + api_client = mock.create_autospec(FirestoreClient) + api_client.list_collection_ids.return_value = iterator + + client = _make_client() + client._firestore_api_internal = api_client + + # Actually make a document and call delete(). + document = self._make_one("where", "we-are", client=client) + if page_size is not None: + collections = list(document.collections(page_size=page_size)) + else: + collections = list(document.collections()) + + # Verify the response and the mocks. + self.assertEqual(len(collections), len(collection_ids)) + for collection, collection_id in zip(collections, collection_ids): + self.assertIsInstance(collection, CollectionReference) + self.assertEqual(collection.parent, document) + self.assertEqual(collection.id, collection_id) + + api_client.list_collection_ids.assert_called_once_with( + document._document_path, page_size=page_size, metadata=client._rpc_metadata + ) + + def test_collections_wo_page_size(self): + self._collections_helper() + + def test_collections_w_page_size(self): + self._collections_helper(page_size=10) + + @mock.patch("google.cloud.firestore_v1.document.Watch", autospec=True) + def test_on_snapshot(self, watch): + client = mock.Mock(_database_string="sprinklez", spec=["_database_string"]) + document = self._make_one("yellow", "mellow", client=client) + document.on_snapshot(None) + watch.for_document.assert_called_once() + + +class TestDocumentSnapshot(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.document import DocumentSnapshot + + return DocumentSnapshot + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def _make_reference(self, *args, **kwargs): + from google.cloud.firestore_v1.document import DocumentReference + + return DocumentReference(*args, **kwargs) + + def _make_w_ref(self, ref_path=("a", "b"), data={}, exists=True): + client = mock.sentinel.client + reference = self._make_reference(*ref_path, client=client) + return self._make_one( + reference, + data, + exists, + mock.sentinel.read_time, + mock.sentinel.create_time, + mock.sentinel.update_time, + ) + + def test_constructor(self): + client = mock.sentinel.client + reference = self._make_reference("hi", "bye", client=client) + data = {"zoop": 83} + snapshot = self._make_one( + reference, + data, + True, + mock.sentinel.read_time, + mock.sentinel.create_time, + mock.sentinel.update_time, + ) + self.assertIs(snapshot._reference, reference) + self.assertEqual(snapshot._data, data) + self.assertIsNot(snapshot._data, data) # Make sure copied. 
+ self.assertTrue(snapshot._exists) + self.assertIs(snapshot.read_time, mock.sentinel.read_time) + self.assertIs(snapshot.create_time, mock.sentinel.create_time) + self.assertIs(snapshot.update_time, mock.sentinel.update_time) + + def test___eq___other_type(self): + snapshot = self._make_w_ref() + other = object() + self.assertFalse(snapshot == other) + + def test___eq___different_reference_same_data(self): + snapshot = self._make_w_ref(("a", "b")) + other = self._make_w_ref(("c", "d")) + self.assertFalse(snapshot == other) + + def test___eq___same_reference_different_data(self): + snapshot = self._make_w_ref(("a", "b")) + other = self._make_w_ref(("a", "b"), {"foo": "bar"}) + self.assertFalse(snapshot == other) + + def test___eq___same_reference_same_data(self): + snapshot = self._make_w_ref(("a", "b"), {"foo": "bar"}) + other = self._make_w_ref(("a", "b"), {"foo": "bar"}) + self.assertTrue(snapshot == other) + + def test___hash__(self): + from google.protobuf import timestamp_pb2 + + client = mock.MagicMock() + client.__hash__.return_value = 234566789 + reference = self._make_reference("hi", "bye", client=client) + data = {"zoop": 83} + update_time = timestamp_pb2.Timestamp(seconds=123456, nanos=123456789) + snapshot = self._make_one( + reference, data, True, None, mock.sentinel.create_time, update_time + ) + self.assertEqual( + hash(snapshot), hash(reference) + hash(123456) + hash(123456789) + ) + + def test__client_property(self): + reference = self._make_reference( + "ok", "fine", "now", "fore", client=mock.sentinel.client + ) + snapshot = self._make_one(reference, {}, False, None, None, None) + self.assertIs(snapshot._client, mock.sentinel.client) + + def test_exists_property(self): + reference = mock.sentinel.reference + + snapshot1 = self._make_one(reference, {}, False, None, None, None) + self.assertFalse(snapshot1.exists) + snapshot2 = self._make_one(reference, {}, True, None, None, None) + self.assertTrue(snapshot2.exists) + + def test_id_property(self): + document_id = "around" + reference = self._make_reference( + "look", document_id, client=mock.sentinel.client + ) + snapshot = self._make_one(reference, {}, True, None, None, None) + self.assertEqual(snapshot.id, document_id) + self.assertEqual(reference.id, document_id) + + def test_reference_property(self): + snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None) + self.assertIs(snapshot.reference, mock.sentinel.reference) + + def test_get(self): + data = {"one": {"bold": "move"}} + snapshot = self._make_one(None, data, True, None, None, None) + + first_read = snapshot.get("one") + second_read = snapshot.get("one") + self.assertEqual(first_read, data.get("one")) + self.assertIsNot(first_read, data.get("one")) + self.assertEqual(first_read, second_read) + self.assertIsNot(first_read, second_read) + + with self.assertRaises(KeyError): + snapshot.get("two") + + def test_nonexistent_snapshot(self): + snapshot = self._make_one(None, None, False, None, None, None) + self.assertIsNone(snapshot.get("one")) + + def test_to_dict(self): + data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}} + snapshot = self._make_one(None, data, True, None, None, None) + as_dict = snapshot.to_dict() + self.assertEqual(as_dict, data) + self.assertIsNot(as_dict, data) + # Check that the data remains unchanged. 
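# A minimal stand-alone restatement of the lookup contract pinned down here:
# get() hands back a copy for an existing snapshot, and a nonexistent snapshot
# simply yields None.  The reference and timestamps are irrelevant, so None is
# passed for them, exactly as these tests do.
from google.cloud.firestore_v1.document import DocumentSnapshot

snapshot = DocumentSnapshot(None, {"one": {"bold": "move"}}, True, None, None, None)
value = snapshot.get("one")
assert value == {"bold": "move"} and value is not snapshot.get("one")

missing = DocumentSnapshot(None, None, False, None, None, None)
assert missing.get("one") is None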
+ as_dict["b"].append("hi") + self.assertEqual(data, snapshot.to_dict()) + self.assertNotEqual(data, as_dict) + + def test_non_existent(self): + snapshot = self._make_one(None, None, False, None, None, None) + as_dict = snapshot.to_dict() + self.assertIsNone(as_dict) + + +class Test__get_document_path(unittest.TestCase): + @staticmethod + def _call_fut(client, path): + from google.cloud.firestore_v1.document import _get_document_path + + return _get_document_path(client, path) + + def test_it(self): + project = "prah-jekt" + client = _make_client(project=project) + path = ("Some", "Document", "Child", "Shockument") + document_path = self._call_fut(client, path) + + expected = "projects/{}/databases/{}/documents/{}".format( + project, client._database, "/".join(path) + ) + self.assertEqual(document_path, expected) + + +class Test__consume_single_get(unittest.TestCase): + @staticmethod + def _call_fut(response_iterator): + from google.cloud.firestore_v1.document import _consume_single_get + + return _consume_single_get(response_iterator) + + def test_success(self): + response_iterator = iter([mock.sentinel.result]) + result = self._call_fut(response_iterator) + self.assertIs(result, mock.sentinel.result) + + def test_failure_not_enough(self): + response_iterator = iter([]) + with self.assertRaises(ValueError): + self._call_fut(response_iterator) + + def test_failure_too_many(self): + response_iterator = iter([None, None]) + with self.assertRaises(ValueError): + self._call_fut(response_iterator) + + +class Test__first_write_result(unittest.TestCase): + @staticmethod + def _call_fut(write_results): + from google.cloud.firestore_v1.document import _first_write_result + + return _first_write_result(write_results) + + def test_success(self): + from google.protobuf import timestamp_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + single_result = write_pb2.WriteResult( + update_time=timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123) + ) + write_results = [single_result] + result = self._call_fut(write_results) + self.assertIs(result, single_result) + + def test_failure_not_enough(self): + write_results = [] + with self.assertRaises(ValueError): + self._call_fut(write_results) + + def test_more_than_one(self): + from google.cloud.firestore_v1.proto import write_pb2 + + result1 = write_pb2.WriteResult() + result2 = write_pb2.WriteResult() + write_results = [result1, result2] + result = self._call_fut(write_results) + self.assertIs(result, result1) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(project="project-project"): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project=project, credentials=credentials) diff --git a/firestore/tests/unit/v1/test_field_path.py b/firestore/tests/unit/v1/test_field_path.py new file mode 100644 index 000000000000..5221321ad10c --- /dev/null +++ b/firestore/tests/unit/v1/test_field_path.py @@ -0,0 +1,495 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + + +class Test__tokenize_field_path(unittest.TestCase): + @staticmethod + def _call_fut(path): + from google.cloud.firestore_v1 import field_path + + return field_path._tokenize_field_path(path) + + def _expect(self, path, split_path): + self.assertEqual(list(self._call_fut(path)), split_path) + + def test_w_empty(self): + self._expect("", []) + + def test_w_single_dot(self): + self._expect(".", ["."]) + + def test_w_single_simple(self): + self._expect("abc", ["abc"]) + + def test_w_single_quoted(self): + self._expect("`c*de`", ["`c*de`"]) + + def test_w_quoted_embedded_dot(self): + self._expect("`c*.de`", ["`c*.de`"]) + + def test_w_quoted_escaped_backtick(self): + self._expect(r"`c*\`de`", [r"`c*\`de`"]) + + def test_w_dotted_quoted(self): + self._expect("`*`.`~`", ["`*`", ".", "`~`"]) + + def test_w_dotted(self): + self._expect("a.b.`c*de`", ["a", ".", "b", ".", "`c*de`"]) + + def test_w_dotted_escaped(self): + self._expect("_0.`1`.`+2`", ["_0", ".", "`1`", ".", "`+2`"]) + + def test_w_unconsumed_characters(self): + path = "a~b" + with self.assertRaises(ValueError): + list(self._call_fut(path)) + + +class Test_split_field_path(unittest.TestCase): + @staticmethod + def _call_fut(path): + from google.cloud.firestore_v1 import field_path + + return field_path.split_field_path(path) + + def test_w_single_dot(self): + with self.assertRaises(ValueError): + self._call_fut(".") + + def test_w_leading_dot(self): + with self.assertRaises(ValueError): + self._call_fut(".a.b.c") + + def test_w_trailing_dot(self): + with self.assertRaises(ValueError): + self._call_fut("a.b.") + + def test_w_missing_dot(self): + with self.assertRaises(ValueError): + self._call_fut("a`c*de`f") + + def test_w_half_quoted_field(self): + with self.assertRaises(ValueError): + self._call_fut("`c*de") + + def test_w_empty(self): + self.assertEqual(self._call_fut(""), []) + + def test_w_simple_field(self): + self.assertEqual(self._call_fut("a"), ["a"]) + + def test_w_dotted_field(self): + self.assertEqual(self._call_fut("a.b.cde"), ["a", "b", "cde"]) + + def test_w_quoted_field(self): + self.assertEqual(self._call_fut("a.b.`c*de`"), ["a", "b", "`c*de`"]) + + def test_w_quoted_field_escaped_backtick(self): + self.assertEqual(self._call_fut(r"`c*\`de`"), [r"`c*\`de`"]) + + +class Test_parse_field_path(unittest.TestCase): + @staticmethod + def _call_fut(path): + from google.cloud.firestore_v1 import field_path + + return field_path.parse_field_path(path) + + def test_wo_escaped_names(self): + self.assertEqual(self._call_fut("a.b.c"), ["a", "b", "c"]) + + def test_w_escaped_backtick(self): + self.assertEqual(self._call_fut("`a\\`b`.c.d"), ["a`b", "c", "d"]) + + def test_w_escaped_backslash(self): + self.assertEqual(self._call_fut("`a\\\\b`.c.d"), ["a\\b", "c", "d"]) + + def test_w_first_name_escaped_wo_closing_backtick(self): + with self.assertRaises(ValueError): + self._call_fut("`a\\`b.c.d") + + +class Test_render_field_path(unittest.TestCase): + @staticmethod + def _call_fut(field_names): + from google.cloud.firestore_v1 import field_path + + return 
field_path.render_field_path(field_names) + + def test_w_empty(self): + self.assertEqual(self._call_fut([]), "") + + def test_w_one_simple(self): + self.assertEqual(self._call_fut(["a"]), "a") + + def test_w_one_starts_w_digit(self): + self.assertEqual(self._call_fut(["0abc"]), "`0abc`") + + def test_w_one_w_non_alphanum(self): + self.assertEqual(self._call_fut(["a b c"]), "`a b c`") + + def test_w_one_w_backtick(self): + self.assertEqual(self._call_fut(["a`b"]), "`a\\`b`") + + def test_w_one_w_backslash(self): + self.assertEqual(self._call_fut(["a\\b"]), "`a\\\\b`") + + def test_multiple(self): + self.assertEqual(self._call_fut(["a", "b", "c"]), "a.b.c") + + +class Test_get_nested_value(unittest.TestCase): + + DATA = { + "top1": {"middle2": {"bottom3": 20, "bottom4": 22}, "middle5": True}, + "top6": b"\x00\x01 foo", + } + + @staticmethod + def _call_fut(path, data): + from google.cloud.firestore_v1 import field_path + + return field_path.get_nested_value(path, data) + + def test_simple(self): + self.assertIs(self._call_fut("top1", self.DATA), self.DATA["top1"]) + + def test_nested(self): + self.assertIs( + self._call_fut("top1.middle2", self.DATA), self.DATA["top1"]["middle2"] + ) + self.assertIs( + self._call_fut("top1.middle2.bottom3", self.DATA), + self.DATA["top1"]["middle2"]["bottom3"], + ) + + def test_missing_top_level(self): + from google.cloud.firestore_v1.field_path import _FIELD_PATH_MISSING_TOP + + field_path = "top8" + with self.assertRaises(KeyError) as exc_info: + self._call_fut(field_path, self.DATA) + + err_msg = _FIELD_PATH_MISSING_TOP.format(field_path) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + def test_missing_key(self): + from google.cloud.firestore_v1.field_path import _FIELD_PATH_MISSING_KEY + + with self.assertRaises(KeyError) as exc_info: + self._call_fut("top1.middle2.nope", self.DATA) + + err_msg = _FIELD_PATH_MISSING_KEY.format("nope", "top1.middle2") + self.assertEqual(exc_info.exception.args, (err_msg,)) + + def test_bad_type(self): + from google.cloud.firestore_v1.field_path import _FIELD_PATH_WRONG_TYPE + + with self.assertRaises(KeyError) as exc_info: + self._call_fut("top6.middle7", self.DATA) + + err_msg = _FIELD_PATH_WRONG_TYPE.format("top6", "middle7") + self.assertEqual(exc_info.exception.args, (err_msg,)) + + +class TestFieldPath(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1 import field_path + + return field_path.FieldPath + + def _make_one(self, *args): + klass = self._get_target_class() + return klass(*args) + + def test_ctor_w_none_in_part(self): + with self.assertRaises(ValueError): + self._make_one("a", None, "b") + + def test_ctor_w_empty_string_in_part(self): + with self.assertRaises(ValueError): + self._make_one("a", "", "b") + + def test_ctor_w_integer_part(self): + with self.assertRaises(ValueError): + self._make_one("a", 3, "b") + + def test_ctor_w_list(self): + parts = ["a", "b", "c"] + with self.assertRaises(ValueError): + self._make_one(parts) + + def test_ctor_w_tuple(self): + parts = ("a", "b", "c") + with self.assertRaises(ValueError): + self._make_one(parts) + + def test_ctor_w_iterable_part(self): + with self.assertRaises(ValueError): + self._make_one("a", ["a"], "b") + + def test_constructor_w_single_part(self): + field_path = self._make_one("a") + self.assertEqual(field_path.parts, ("a",)) + + def test_constructor_w_multiple_parts(self): + field_path = self._make_one("a", "b", "c") + self.assertEqual(field_path.parts, ("a", "b", "c")) + + def 
test_ctor_w_invalid_chars_in_part(self): + invalid_parts = ("~", "*", "/", "[", "]", ".") + for invalid_part in invalid_parts: + field_path = self._make_one(invalid_part) + self.assertEqual(field_path.parts, (invalid_part,)) + + def test_ctor_w_double_dots(self): + field_path = self._make_one("a..b") + self.assertEqual(field_path.parts, ("a..b",)) + + def test_ctor_w_unicode(self): + field_path = self._make_one("一", "二", "三") + self.assertEqual(field_path.parts, ("一", "二", "三")) + + def test_from_api_repr_w_empty_string(self): + api_repr = "" + with self.assertRaises(ValueError): + self._get_target_class().from_api_repr(api_repr) + + def test_from_api_repr_w_empty_field_name(self): + api_repr = "a..b" + with self.assertRaises(ValueError): + self._get_target_class().from_api_repr(api_repr) + + def test_from_api_repr_w_invalid_chars(self): + invalid_parts = ("~", "*", "/", "[", "]", ".") + for invalid_part in invalid_parts: + with self.assertRaises(ValueError): + self._get_target_class().from_api_repr(invalid_part) + + def test_from_api_repr_w_ascii_single(self): + api_repr = "a" + field_path = self._get_target_class().from_api_repr(api_repr) + self.assertEqual(field_path.parts, ("a",)) + + def test_from_api_repr_w_ascii_dotted(self): + api_repr = "a.b.c" + field_path = self._get_target_class().from_api_repr(api_repr) + self.assertEqual(field_path.parts, ("a", "b", "c")) + + def test_from_api_repr_w_non_ascii_dotted_non_quoted(self): + api_repr = "a.一" + with self.assertRaises(ValueError): + self._get_target_class().from_api_repr(api_repr) + + def test_from_api_repr_w_non_ascii_dotted_quoted(self): + api_repr = "a.`一`" + field_path = self._get_target_class().from_api_repr(api_repr) + self.assertEqual(field_path.parts, ("a", "一")) + + def test_from_string_w_empty_string(self): + path_string = "" + with self.assertRaises(ValueError): + self._get_target_class().from_string(path_string) + + def test_from_string_w_empty_field_name(self): + path_string = "a..b" + with self.assertRaises(ValueError): + self._get_target_class().from_string(path_string) + + def test_from_string_w_leading_dot(self): + path_string = ".b.c" + with self.assertRaises(ValueError): + self._get_target_class().from_string(path_string) + + def test_from_string_w_trailing_dot(self): + path_string = "a.b." 
+ with self.assertRaises(ValueError): + self._get_target_class().from_string(path_string) + + def test_from_string_w_leading_invalid_chars(self): + invalid_paths = ("~", "*", "/", "[", "]") + for invalid_path in invalid_paths: + field_path = self._get_target_class().from_string(invalid_path) + self.assertEqual(field_path.parts, (invalid_path,)) + + def test_from_string_w_embedded_invalid_chars(self): + invalid_paths = ("a~b", "x*y", "f/g", "h[j", "k]l") + for invalid_path in invalid_paths: + with self.assertRaises(ValueError): + self._get_target_class().from_string(invalid_path) + + def test_from_string_w_ascii_single(self): + path_string = "a" + field_path = self._get_target_class().from_string(path_string) + self.assertEqual(field_path.parts, ("a",)) + + def test_from_string_w_ascii_dotted(self): + path_string = "a.b.c" + field_path = self._get_target_class().from_string(path_string) + self.assertEqual(field_path.parts, ("a", "b", "c")) + + def test_from_string_w_non_ascii_dotted(self): + path_string = "a.一" + field_path = self._get_target_class().from_string(path_string) + self.assertEqual(field_path.parts, ("a", "一")) + + def test___hash___w_single_part(self): + field_path = self._make_one("a") + self.assertEqual(hash(field_path), hash("a")) + + def test___hash___w_multiple_parts(self): + field_path = self._make_one("a", "b") + self.assertEqual(hash(field_path), hash("a.b")) + + def test___hash___w_escaped_parts(self): + field_path = self._make_one("a", "3") + self.assertEqual(hash(field_path), hash("a.`3`")) + + def test___eq___w_matching_type(self): + field_path = self._make_one("a", "b") + string_path = self._get_target_class().from_string("a.b") + self.assertEqual(field_path, string_path) + + def test___eq___w_non_matching_type(self): + field_path = self._make_one("a", "c") + other = mock.Mock() + other.parts = "a", "b" + self.assertNotEqual(field_path, other) + + def test___lt___w_matching_type(self): + field_path = self._make_one("a", "b") + string_path = self._get_target_class().from_string("a.c") + self.assertTrue(field_path < string_path) + + def test___lt___w_non_matching_type(self): + field_path = self._make_one("a", "b") + other = object() + # Python 2 doesn't raise TypeError here, but Python3 does. + self.assertIs(field_path.__lt__(other), NotImplemented) + + def test___add__(self): + path1 = "a123", "b456" + path2 = "c789", "d012" + path3 = "c789.d012" + field_path1 = self._make_one(*path1) + field_path1_string = self._make_one(*path1) + field_path2 = self._make_one(*path2) + field_path1 += field_path2 + field_path1_string += path3 + field_path2 = field_path2 + self._make_one(*path1) + self.assertEqual(field_path1, self._make_one(*(path1 + path2))) + self.assertEqual(field_path2, self._make_one(*(path2 + path1))) + self.assertEqual(field_path1_string, field_path1) + self.assertNotEqual(field_path1, field_path2) + with self.assertRaises(TypeError): + field_path1 + 305 + + def test_to_api_repr_a(self): + parts = "a" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "a") + + def test_to_api_repr_backtick(self): + parts = "`" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), r"`\``") + + def test_to_api_repr_dot(self): + parts = "." 
+ field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "`.`") + + def test_to_api_repr_slash(self): + parts = "\\" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), r"`\\`") + + def test_to_api_repr_double_slash(self): + parts = r"\\" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), r"`\\\\`") + + def test_to_api_repr_underscore(self): + parts = "_33132" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "_33132") + + def test_to_api_repr_unicode_non_simple(self): + parts = "一" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "`一`") + + def test_to_api_repr_number_non_simple(self): + parts = "03" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "`03`") + + def test_to_api_repr_simple_with_dot(self): + field_path = self._make_one("a.b") + self.assertEqual(field_path.to_api_repr(), "`a.b`") + + def test_to_api_repr_non_simple_with_dot(self): + parts = "a.一" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "`a.一`") + + def test_to_api_repr_simple(self): + parts = "a0332432" + field_path = self._make_one(parts) + self.assertEqual(field_path.to_api_repr(), "a0332432") + + def test_to_api_repr_chain(self): + parts = "a", "`", "\\", "_3", "03", "a03", "\\\\", "a0332432", "一" + field_path = self._make_one(*parts) + self.assertEqual( + field_path.to_api_repr(), r"a.`\``.`\\`._3.`03`.a03.`\\\\`.a0332432.`一`" + ) + + def test_eq_or_parent_same(self): + field_path = self._make_one("a", "b") + other = self._make_one("a", "b") + self.assertTrue(field_path.eq_or_parent(other)) + + def test_eq_or_parent_prefix(self): + field_path = self._make_one("a", "b") + other = self._make_one("a", "b", "c") + self.assertTrue(field_path.eq_or_parent(other)) + self.assertTrue(other.eq_or_parent(field_path)) + + def test_eq_or_parent_no_prefix(self): + field_path = self._make_one("a", "b") + other = self._make_one("d", "e", "f") + self.assertFalse(field_path.eq_or_parent(other)) + self.assertFalse(other.eq_or_parent(field_path)) + + def test_lineage_empty(self): + field_path = self._make_one() + expected = set() + self.assertEqual(field_path.lineage(), expected) + + def test_lineage_single(self): + field_path = self._make_one("a") + expected = set() + self.assertEqual(field_path.lineage(), expected) + + def test_lineage_nested(self): + field_path = self._make_one("a", "b", "c") + expected = set([self._make_one("a"), self._make_one("a", "b")]) + self.assertEqual(field_path.lineage(), expected) diff --git a/firestore/tests/unit/v1/test_order.py b/firestore/tests/unit/v1/test_order.py new file mode 100644 index 000000000000..c37e2470a3ec --- /dev/null +++ b/firestore/tests/unit/v1/test_order.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
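The ordering tests that follow build value groups in Firestore's documented cross-type order (null, then booleans, numbers, timestamps, strings, bytes, references, geo points, arrays, maps) and check every pairwise comparison. As a minimal sketch of that two-step rule, using plain Python stand-ins and hypothetical helper names rather than the library's Order/TypeOrder classes:

NULL, BOOLEAN, NUMBER, STRING, BLOB = range(5)

def _type_rank(value):
    # Rank by type group; bool is checked before int/float because bool subclasses int.
    if value is None:
        return NULL
    if isinstance(value, bool):
        return BOOLEAN
    if isinstance(value, (int, float)):
        return NUMBER
    if isinstance(value, str):
        return STRING
    if isinstance(value, bytes):
        return BLOB
    raise ValueError("type not covered by this sketch")

def _compare(left, right):
    # Compare type groups first, then values within the same group.
    left_rank, right_rank = _type_rank(left), _type_rank(right)
    if left_rank != right_rank:
        return (left_rank > right_rank) - (left_rank < right_rank)
    return (left > right) - (left < right)

assert _compare(True, 0) < 0        # every boolean sorts before every number
assert _compare(1, 1.0) == 0        # integers and doubles compare as equal
assert _compare(b"\x00", "z") > 0   # bytes sort after strings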
+ +import mock +import six +import unittest + +from google.cloud.firestore_v1._helpers import encode_value, GeoPoint +from google.cloud.firestore_v1.order import Order +from google.cloud.firestore_v1.order import TypeOrder + +from google.cloud.firestore_v1.proto import document_pb2 + +from google.protobuf import timestamp_pb2 + + +class TestOrder(unittest.TestCase): + + if six.PY2: + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.order import Order + + return Order + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_order(self): + # Constants used to represent min/max values of storage types. + int_max_value = 2 ** 31 - 1 + int_min_value = -(2 ** 31) + float_min_value = 1.175494351 ** -38 + float_nan = float("nan") + inf = float("inf") + + groups = [None] * 65 + + groups[0] = [nullValue()] + + groups[1] = [_boolean_value(False)] + groups[2] = [_boolean_value(True)] + + # numbers + groups[3] = [_double_value(float_nan), _double_value(float_nan)] + groups[4] = [_double_value(-inf)] + groups[5] = [_int_value(int_min_value - 1)] + groups[6] = [_int_value(int_min_value)] + groups[7] = [_double_value(-1.1)] + # Integers and Doubles order the same. + groups[8] = [_int_value(-1), _double_value(-1.0)] + groups[9] = [_double_value(-float_min_value)] + # zeros all compare the same. + groups[10] = [ + _int_value(0), + _double_value(-0.0), + _double_value(0.0), + _double_value(+0.0), + ] + groups[11] = [_double_value(float_min_value)] + groups[12] = [_int_value(1), _double_value(1.0)] + groups[13] = [_double_value(1.1)] + groups[14] = [_int_value(int_max_value)] + groups[15] = [_int_value(int_max_value + 1)] + groups[16] = [_double_value(inf)] + + groups[17] = [_timestamp_value(123, 0)] + groups[18] = [_timestamp_value(123, 123)] + groups[19] = [_timestamp_value(345, 0)] + + # strings + groups[20] = [_string_value("")] + groups[21] = [_string_value("\u0000\ud7ff\ue000\uffff")] + groups[22] = [_string_value("(╯°□°)╯︵ ┻━┻")] + groups[23] = [_string_value("a")] + groups[24] = [_string_value("abc def")] + # latin small letter e + combining acute accent + latin small letter b + groups[25] = [_string_value("e\u0301b")] + groups[26] = [_string_value("æ")] + # latin small letter e with acute accent + latin small letter a + groups[27] = [_string_value("\u00e9a")] + + # blobs + groups[28] = [_blob_value(b"")] + groups[29] = [_blob_value(b"\x00")] + groups[30] = [_blob_value(b"\x00\x01\x02\x03\x04")] + groups[31] = [_blob_value(b"\x00\x01\x02\x04\x03")] + groups[32] = [_blob_value(b"\x7f")] + + # resource names + groups[33] = [_reference_value("projects/p1/databases/d1/documents/c1/doc1")] + groups[34] = [_reference_value("projects/p1/databases/d1/documents/c1/doc2")] + groups[35] = [ + _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc1") + ] + groups[36] = [ + _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc2") + ] + groups[37] = [_reference_value("projects/p1/databases/d1/documents/c10/doc1")] + groups[38] = [_reference_value("projects/p1/databases/d1/documents/c2/doc1")] + groups[39] = [_reference_value("projects/p2/databases/d2/documents/c1/doc1")] + groups[40] = [_reference_value("projects/p2/databases/d2/documents/c1-/doc1")] + groups[41] = [_reference_value("projects/p2/databases/d3/documents/c1-/doc1")] + + # geo points + groups[42] = [_geoPoint_value(-90, -180)] + groups[43] = [_geoPoint_value(-90, 0)] + 
groups[44] = [_geoPoint_value(-90, 180)] + groups[45] = [_geoPoint_value(0, -180)] + groups[46] = [_geoPoint_value(0, 0)] + groups[47] = [_geoPoint_value(0, 180)] + groups[48] = [_geoPoint_value(1, -180)] + groups[49] = [_geoPoint_value(1, 0)] + groups[50] = [_geoPoint_value(1, 180)] + groups[51] = [_geoPoint_value(90, -180)] + groups[52] = [_geoPoint_value(90, 0)] + groups[53] = [_geoPoint_value(90, 180)] + + # arrays + groups[54] = [_array_value()] + groups[55] = [_array_value(["bar"])] + groups[56] = [_array_value(["foo"])] + groups[57] = [_array_value(["foo", 0])] + groups[58] = [_array_value(["foo", 1])] + groups[59] = [_array_value(["foo", "0"])] + + # objects + groups[60] = [_object_value({"bar": 0})] + groups[61] = [_object_value({"bar": 0, "foo": 1})] + groups[62] = [_object_value({"bar": 1})] + groups[63] = [_object_value({"bar": 2})] + groups[64] = [_object_value({"bar": "0"})] + + target = self._make_one() + + for i in range(len(groups)): + for left in groups[i]: + for j in range(len(groups)): + for right in groups[j]: + expected = Order._compare_to(i, j) + + self.assertEqual( + target.compare(left, right), + expected, + "comparing L->R {} ({}) to {} ({})".format( + i, left, j, right + ), + ) + + expected = Order._compare_to(j, i) + self.assertEqual( + target.compare(right, left), + expected, + "comparing R->L {} ({}) to {} ({})".format( + j, right, i, left + ), + ) + + def test_typeorder_type_failure(self): + target = self._make_one() + left = mock.Mock() + left.WhichOneof.return_value = "imaginary-type" + + with self.assertRaisesRegex(ValueError, "Could not detect value"): + target.compare(left, mock.Mock()) + + def test_failure_to_find_type(self): + target = self._make_one() + left = mock.Mock() + left.WhichOneof.return_value = "imaginary-type" + right = mock.Mock() + # Patch from value to get to the deep compare. Since left is a bad type + # expect this to fail with value error. + with mock.patch.object(TypeOrder, "from_value") as to: + to.value = None + with self.assertRaisesRegex(ValueError, "'Unknown ``value_type``"): + target.compare(left, right) + + def test_compare_objects_different_keys(self): + left = _object_value({"foo": 0}) + right = _object_value({"bar": 0}) + + target = self._make_one() + target.compare(left, right) + + +def _boolean_value(b): + return encode_value(b) + + +def _double_value(d): + return encode_value(d) + + +def _int_value(l): + return encode_value(l) + + +def _string_value(s): + if not isinstance(s, six.text_type): + s = six.u(s) + return encode_value(s) + + +def _reference_value(r): + return document_pb2.Value(reference_value=r) + + +def _blob_value(b): + return encode_value(b) + + +def nullValue(): + return encode_value(None) + + +def _timestamp_value(seconds, nanos): + return document_pb2.Value( + timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) + ) + + +def _geoPoint_value(latitude, longitude): + return encode_value(GeoPoint(latitude, longitude)) + + +def _array_value(values=[]): + return encode_value(values) + + +def _object_value(keysAndValues): + return encode_value(keysAndValues) diff --git a/firestore/tests/unit/v1/test_query.py b/firestore/tests/unit/v1/test_query.py new file mode 100644 index 000000000000..c67c053c7765 --- /dev/null +++ b/firestore/tests/unit/v1/test_query.py @@ -0,0 +1,1587 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import types +import unittest + +import mock +import six + + +class TestQuery(unittest.TestCase): + + if six.PY2: + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.query import Query + + return Query + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor_defaults(self): + query = self._make_one(mock.sentinel.parent) + self.assertIs(query._parent, mock.sentinel.parent) + self.assertIsNone(query._projection) + self.assertEqual(query._field_filters, ()) + self.assertEqual(query._orders, ()) + self.assertIsNone(query._limit) + self.assertIsNone(query._offset) + self.assertIsNone(query._start_at) + self.assertIsNone(query._end_at) + + def _make_one_all_fields(self, limit=9876, offset=12, skip_fields=(), parent=None): + kwargs = { + "projection": mock.sentinel.projection, + "field_filters": mock.sentinel.filters, + "orders": mock.sentinel.orders, + "limit": limit, + "offset": offset, + "start_at": mock.sentinel.start_at, + "end_at": mock.sentinel.end_at, + } + for field in skip_fields: + kwargs.pop(field) + if parent is None: + parent = mock.sentinel.parent + return self._make_one(parent, **kwargs) + + def test_constructor_explicit(self): + limit = 234 + offset = 56 + query = self._make_one_all_fields(limit=limit, offset=offset) + self.assertIs(query._parent, mock.sentinel.parent) + self.assertIs(query._projection, mock.sentinel.projection) + self.assertIs(query._field_filters, mock.sentinel.filters) + self.assertEqual(query._orders, mock.sentinel.orders) + self.assertEqual(query._limit, limit) + self.assertEqual(query._offset, offset) + self.assertIs(query._start_at, mock.sentinel.start_at) + self.assertIs(query._end_at, mock.sentinel.end_at) + + def test__client_property(self): + parent = mock.Mock(_client=mock.sentinel.client, spec=["_client"]) + query = self._make_one(parent) + self.assertIs(query._client, mock.sentinel.client) + + def test___eq___other_type(self): + client = self._make_one_all_fields() + other = object() + self.assertFalse(client == other) + + def test___eq___different_parent(self): + parent = mock.sentinel.parent + other_parent = mock.sentinel.other_parent + client = self._make_one_all_fields(parent=parent) + other = self._make_one_all_fields(parent=other_parent) + self.assertFalse(client == other) + + def test___eq___different_projection(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, skip_fields=("projection",)) + client._projection = mock.sentinel.projection + other = self._make_one_all_fields(parent=parent, skip_fields=("projection",)) + other._projection = mock.sentinel.other_projection + self.assertFalse(client == other) + + def test___eq___different_field_filters(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields( + parent=parent, skip_fields=("field_filters",) + ) + client._field_filters = mock.sentinel.field_filters + other = self._make_one_all_fields(parent=parent, skip_fields=("field_filters",)) + 
other._field_filters = mock.sentinel.other_field_filters + self.assertFalse(client == other) + + def test___eq___different_orders(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, skip_fields=("orders",)) + client._orders = mock.sentinel.orders + other = self._make_one_all_fields(parent=parent, skip_fields=("orders",)) + other._orders = mock.sentinel.other_orders + self.assertFalse(client == other) + + def test___eq___different_limit(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, limit=10) + other = self._make_one_all_fields(parent=parent, limit=20) + self.assertFalse(client == other) + + def test___eq___different_offset(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, offset=10) + other = self._make_one_all_fields(parent=parent, offset=20) + self.assertFalse(client == other) + + def test___eq___different_start_at(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, skip_fields=("start_at",)) + client._start_at = mock.sentinel.start_at + other = self._make_one_all_fields(parent=parent, skip_fields=("start_at",)) + other._start_at = mock.sentinel.other_start_at + self.assertFalse(client == other) + + def test___eq___different_end_at(self): + parent = mock.sentinel.parent + client = self._make_one_all_fields(parent=parent, skip_fields=("end_at",)) + client._end_at = mock.sentinel.end_at + other = self._make_one_all_fields(parent=parent, skip_fields=("end_at",)) + other._end_at = mock.sentinel.other_end_at + self.assertFalse(client == other) + + def test___eq___hit(self): + client = self._make_one_all_fields() + other = self._make_one_all_fields() + self.assertTrue(client == other) + + def _compare_queries(self, query1, query2, attr_name): + attrs1 = query1.__dict__.copy() + attrs2 = query2.__dict__.copy() + + attrs1.pop(attr_name) + attrs2.pop(attr_name) + + # The only different should be in ``attr_name``. + self.assertEqual(len(attrs1), len(attrs2)) + for key, value in attrs1.items(): + self.assertIs(value, attrs2[key]) + + @staticmethod + def _make_projection_for_select(field_paths): + from google.cloud.firestore_v1.proto import query_pb2 + + return query_pb2.StructuredQuery.Projection( + fields=[ + query_pb2.StructuredQuery.FieldReference(field_path=field_path) + for field_path in field_paths + ] + ) + + def test_select_invalid_path(self): + query = self._make_one(mock.sentinel.parent) + + with self.assertRaises(ValueError): + query.select(["*"]) + + def test_select(self): + query1 = self._make_one_all_fields() + + field_paths2 = ["foo", "bar"] + query2 = query1.select(field_paths2) + self.assertIsNot(query2, query1) + self.assertIsInstance(query2, self._get_target_class()) + self.assertEqual( + query2._projection, self._make_projection_for_select(field_paths2) + ) + self._compare_queries(query1, query2, "_projection") + + # Make sure it overrides. 
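(The select override check resumes just below.) A broader point these query tests keep re-asserting: select, where, order_by, limit, offset, and the cursor helpers all return a new Query and never mutate the receiver, which is exactly what the assertIsNot plus _compare_queries combination verifies. A short usage sketch against the public API, assuming a configured project and credentials, with made-up collection and field names:

from google.cloud import firestore

client = firestore.Client()
base = client.collection("users")

# Each call returns a fresh Query; ``base`` itself is left untouched.
narrowed = base.select(["name"]).where("age", ">", 21).order_by("age").limit(10)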
+ field_paths3 = ["foo.baz"] + query3 = query2.select(field_paths3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual( + query3._projection, self._make_projection_for_select(field_paths3) + ) + self._compare_queries(query2, query3, "_projection") + + def test_where_invalid_path(self): + query = self._make_one(mock.sentinel.parent) + + with self.assertRaises(ValueError): + query.where("*", "==", 1) + + def test_where(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + query = self._make_one_all_fields(skip_fields=("field_filters",)) + new_query = query.where("power.level", ">", 9000) + + self.assertIsNot(query, new_query) + self.assertIsInstance(new_query, self._get_target_class()) + self.assertEqual(len(new_query._field_filters), 1) + + field_pb = new_query._field_filters[0] + expected_pb = query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="power.level"), + op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, + value=document_pb2.Value(integer_value=9000), + ) + self.assertEqual(field_pb, expected_pb) + self._compare_queries(query, new_query, "_field_filters") + + def _where_unary_helper(self, value, op_enum, op_string="=="): + from google.cloud.firestore_v1.proto import query_pb2 + + query = self._make_one_all_fields(skip_fields=("field_filters",)) + field_path = "feeeld" + new_query = query.where(field_path, op_string, value) + + self.assertIsNot(query, new_query) + self.assertIsInstance(new_query, self._get_target_class()) + self.assertEqual(len(new_query._field_filters), 1) + + field_pb = new_query._field_filters[0] + expected_pb = query_pb2.StructuredQuery.UnaryFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + op=op_enum, + ) + self.assertEqual(field_pb, expected_pb) + self._compare_queries(query, new_query, "_field_filters") + + def test_where_eq_null(self): + from google.cloud.firestore_v1.gapic import enums + + op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NULL + self._where_unary_helper(None, op_enum) + + def test_where_gt_null(self): + with self.assertRaises(ValueError): + self._where_unary_helper(None, 0, op_string=">") + + def test_where_eq_nan(self): + from google.cloud.firestore_v1.gapic import enums + + op_enum = enums.StructuredQuery.UnaryFilter.Operator.IS_NAN + self._where_unary_helper(float("nan"), op_enum) + + def test_where_le_nan(self): + with self.assertRaises(ValueError): + self._where_unary_helper(float("nan"), 0, op_string="<=") + + def test_where_w_delete(self): + from google.cloud.firestore_v1 import DELETE_FIELD + + with self.assertRaises(ValueError): + self._where_unary_helper(DELETE_FIELD, 0) + + def test_where_w_server_timestamp(self): + from google.cloud.firestore_v1 import SERVER_TIMESTAMP + + with self.assertRaises(ValueError): + self._where_unary_helper(SERVER_TIMESTAMP, 0) + + def test_where_w_array_remove(self): + from google.cloud.firestore_v1 import ArrayRemove + + with self.assertRaises(ValueError): + self._where_unary_helper(ArrayRemove([1, 3, 5]), 0) + + def test_where_w_array_union(self): + from google.cloud.firestore_v1 import ArrayUnion + + with self.assertRaises(ValueError): + self._where_unary_helper(ArrayUnion([2, 4, 8]), 0) + + def test_order_by_invalid_path(self): + query = self._make_one(mock.sentinel.parent) + + with self.assertRaises(ValueError): + 
query.order_by("*") + + def test_order_by(self): + from google.cloud.firestore_v1.gapic import enums + + klass = self._get_target_class() + query1 = self._make_one_all_fields(skip_fields=("orders",)) + + field_path2 = "a" + query2 = query1.order_by(field_path2) + self.assertIsNot(query2, query1) + self.assertIsInstance(query2, klass) + order_pb2 = _make_order_pb( + field_path2, enums.StructuredQuery.Direction.ASCENDING + ) + self.assertEqual(query2._orders, (order_pb2,)) + self._compare_queries(query1, query2, "_orders") + + # Make sure it appends to the orders. + field_path3 = "b" + query3 = query2.order_by(field_path3, direction=klass.DESCENDING) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, klass) + order_pb3 = _make_order_pb( + field_path3, enums.StructuredQuery.Direction.DESCENDING + ) + self.assertEqual(query3._orders, (order_pb2, order_pb3)) + self._compare_queries(query2, query3, "_orders") + + def test_limit(self): + query1 = self._make_one_all_fields() + + limit2 = 100 + query2 = query1.limit(limit2) + self.assertIsNot(query2, query1) + self.assertIsInstance(query2, self._get_target_class()) + self.assertEqual(query2._limit, limit2) + self._compare_queries(query1, query2, "_limit") + + # Make sure it overrides. + limit3 = 10 + query3 = query2.limit(limit3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._limit, limit3) + self._compare_queries(query2, query3, "_limit") + + def test_offset(self): + query1 = self._make_one_all_fields() + + offset2 = 23 + query2 = query1.offset(offset2) + self.assertIsNot(query2, query1) + self.assertIsInstance(query2, self._get_target_class()) + self.assertEqual(query2._offset, offset2) + self._compare_queries(query1, query2, "_offset") + + # Make sure it overrides. 
+ offset3 = 35 + query3 = query2.offset(offset3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._offset, offset3) + self._compare_queries(query2, query3, "_offset") + + @staticmethod + def _make_collection(*path, **kw): + from google.cloud.firestore_v1 import collection + + return collection.CollectionReference(*path, **kw) + + @staticmethod + def _make_docref(*path, **kw): + from google.cloud.firestore_v1 import document + + return document.DocumentReference(*path, **kw) + + @staticmethod + def _make_snapshot(docref, values): + from google.cloud.firestore_v1 import document + + return document.DocumentSnapshot(docref, values, True, None, None, None) + + def test__cursor_helper_w_dict(self): + values = {"a": 7, "b": "foo"} + query1 = self._make_one(mock.sentinel.parent) + query2 = query1._cursor_helper(values, True, True) + + self.assertIs(query2._parent, mock.sentinel.parent) + self.assertIsNone(query2._projection) + self.assertEqual(query2._field_filters, ()) + self.assertEqual(query2._orders, query1._orders) + self.assertIsNone(query2._limit) + self.assertIsNone(query2._offset) + self.assertIsNone(query2._end_at) + + cursor, before = query2._start_at + + self.assertEqual(cursor, values) + self.assertTrue(before) + + def test__cursor_helper_w_tuple(self): + values = (7, "foo") + query1 = self._make_one(mock.sentinel.parent) + query2 = query1._cursor_helper(values, False, True) + + self.assertIs(query2._parent, mock.sentinel.parent) + self.assertIsNone(query2._projection) + self.assertEqual(query2._field_filters, ()) + self.assertEqual(query2._orders, query1._orders) + self.assertIsNone(query2._limit) + self.assertIsNone(query2._offset) + self.assertIsNone(query2._end_at) + + cursor, before = query2._start_at + + self.assertEqual(cursor, list(values)) + self.assertFalse(before) + + def test__cursor_helper_w_list(self): + values = [7, "foo"] + query1 = self._make_one(mock.sentinel.parent) + query2 = query1._cursor_helper(values, True, False) + + self.assertIs(query2._parent, mock.sentinel.parent) + self.assertIsNone(query2._projection) + self.assertEqual(query2._field_filters, ()) + self.assertEqual(query2._orders, query1._orders) + self.assertIsNone(query2._limit) + self.assertIsNone(query2._offset) + self.assertIsNone(query2._start_at) + + cursor, before = query2._end_at + + self.assertEqual(cursor, values) + self.assertIsNot(cursor, values) + self.assertTrue(before) + + def test__cursor_helper_w_snapshot_wrong_collection(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("there", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query = self._make_one(collection) + + with self.assertRaises(ValueError): + query._cursor_helper(snapshot, False, False) + + def test__cursor_helper_w_snapshot(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query1 = self._make_one(collection) + + query2 = query1._cursor_helper(snapshot, False, False) + + self.assertIs(query2._parent, collection) + self.assertIsNone(query2._projection) + self.assertEqual(query2._field_filters, ()) + self.assertEqual(query2._orders, ()) + self.assertIsNone(query2._limit) + self.assertIsNone(query2._offset) + self.assertIsNone(query2._start_at) + + cursor, before = query2._end_at + + self.assertIs(cursor, snapshot) + self.assertFalse(before) + + def 
test_start_at(self): + collection = self._make_collection("here") + query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) + query2 = query1.order_by("hi") + + document_fields3 = {"hi": "mom"} + query3 = query2.start_at(document_fields3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._start_at, (document_fields3, True)) + self._compare_queries(query2, query3, "_start_at") + + # Make sure it overrides. + query4 = query3.order_by("bye") + values5 = {"hi": "zap", "bye": 88} + docref = self._make_docref("here", "doc_id") + document_fields5 = self._make_snapshot(docref, values5) + query5 = query4.start_at(document_fields5) + self.assertIsNot(query5, query4) + self.assertIsInstance(query5, self._get_target_class()) + self.assertEqual(query5._start_at, (document_fields5, True)) + self._compare_queries(query4, query5, "_start_at") + + def test_start_after(self): + collection = self._make_collection("here") + query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) + query2 = query1.order_by("down") + + document_fields3 = {"down": 99.75} + query3 = query2.start_after(document_fields3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._start_at, (document_fields3, False)) + self._compare_queries(query2, query3, "_start_at") + + # Make sure it overrides. + query4 = query3.order_by("out") + values5 = {"down": 100.25, "out": b"\x00\x01"} + docref = self._make_docref("here", "doc_id") + document_fields5 = self._make_snapshot(docref, values5) + query5 = query4.start_after(document_fields5) + self.assertIsNot(query5, query4) + self.assertIsInstance(query5, self._get_target_class()) + self.assertEqual(query5._start_at, (document_fields5, False)) + self._compare_queries(query4, query5, "_start_at") + + def test_end_before(self): + collection = self._make_collection("here") + query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) + query2 = query1.order_by("down") + + document_fields3 = {"down": 99.75} + query3 = query2.end_before(document_fields3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._end_at, (document_fields3, True)) + self._compare_queries(query2, query3, "_end_at") + + # Make sure it overrides. + query4 = query3.order_by("out") + values5 = {"down": 100.25, "out": b"\x00\x01"} + docref = self._make_docref("here", "doc_id") + document_fields5 = self._make_snapshot(docref, values5) + query5 = query4.end_before(document_fields5) + self.assertIsNot(query5, query4) + self.assertIsInstance(query5, self._get_target_class()) + self.assertEqual(query5._end_at, (document_fields5, True)) + self._compare_queries(query4, query5, "_end_at") + self._compare_queries(query4, query5, "_end_at") + + def test_end_at(self): + collection = self._make_collection("here") + query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) + query2 = query1.order_by("hi") + + document_fields3 = {"hi": "mom"} + query3 = query2.end_at(document_fields3) + self.assertIsNot(query3, query2) + self.assertIsInstance(query3, self._get_target_class()) + self.assertEqual(query3._end_at, (document_fields3, False)) + self._compare_queries(query2, query3, "_end_at") + + # Make sure it overrides. 
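(test_end_at's override check continues below.) The four cursor builders differ only in which attribute they populate and whether the cursor is inclusive; the assertions in the surrounding tests pin that bookkeeping down as summarized here (an illustrative constant, not library code):

# method        -> (query attribute, ``before`` flag), per the tests above
CURSOR_BOOKKEEPING = {
    "start_at":    ("_start_at", True),
    "start_after": ("_start_at", False),
    "end_before":  ("_end_at", True),
    "end_at":      ("_end_at", False),
}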
+ query4 = query3.order_by("bye") + values5 = {"hi": "zap", "bye": 88} + docref = self._make_docref("here", "doc_id") + document_fields5 = self._make_snapshot(docref, values5) + query5 = query4.end_at(document_fields5) + self.assertIsNot(query5, query4) + self.assertIsInstance(query5, self._get_target_class()) + self.assertEqual(query5._end_at, (document_fields5, False)) + self._compare_queries(query4, query5, "_end_at") + + def test__filters_pb_empty(self): + query = self._make_one(mock.sentinel.parent) + self.assertEqual(len(query._field_filters), 0) + self.assertIsNone(query._filters_pb()) + + def test__filters_pb_single(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + query1 = self._make_one(mock.sentinel.parent) + query2 = query1.where("x.y", ">", 50.5) + filter_pb = query2._filters_pb() + expected_pb = query_pb2.StructuredQuery.Filter( + field_filter=query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="x.y"), + op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, + value=document_pb2.Value(double_value=50.5), + ) + ) + self.assertEqual(filter_pb, expected_pb) + + def test__filters_pb_multi(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + query1 = self._make_one(mock.sentinel.parent) + query2 = query1.where("x.y", ">", 50.5) + query3 = query2.where("ABC", "==", 123) + + filter_pb = query3._filters_pb() + op_class = enums.StructuredQuery.FieldFilter.Operator + expected_pb = query_pb2.StructuredQuery.Filter( + composite_filter=query_pb2.StructuredQuery.CompositeFilter( + op=enums.StructuredQuery.CompositeFilter.Operator.AND, + filters=[ + query_pb2.StructuredQuery.Filter( + field_filter=query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference( + field_path="x.y" + ), + op=op_class.GREATER_THAN, + value=document_pb2.Value(double_value=50.5), + ) + ), + query_pb2.StructuredQuery.Filter( + field_filter=query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference( + field_path="ABC" + ), + op=op_class.EQUAL, + value=document_pb2.Value(integer_value=123), + ) + ), + ], + ) + ) + self.assertEqual(filter_pb, expected_pb) + + def test__normalize_projection_none(self): + query = self._make_one(mock.sentinel.parent) + self.assertIsNone(query._normalize_projection(None)) + + def test__normalize_projection_empty(self): + projection = self._make_projection_for_select([]) + query = self._make_one(mock.sentinel.parent) + normalized = query._normalize_projection(projection) + field_paths = [field_ref.field_path for field_ref in normalized.fields] + self.assertEqual(field_paths, ["__name__"]) + + def test__normalize_projection_non_empty(self): + projection = self._make_projection_for_select(["a", "b"]) + query = self._make_one(mock.sentinel.parent) + self.assertIs(query._normalize_projection(projection), projection) + + def test__normalize_orders_wo_orders_wo_cursors(self): + query = self._make_one(mock.sentinel.parent) + expected = [] + self.assertEqual(query._normalize_orders(), expected) + + def test__normalize_orders_w_orders_wo_cursors(self): + query = self._make_one(mock.sentinel.parent).order_by("a") + expected = [query._make_order("a", "ASCENDING")] + self.assertEqual(query._normalize_orders(), expected) + + def 
test__normalize_orders_wo_orders_w_snapshot_cursor(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query = self._make_one(collection).start_at(snapshot) + expected = [query._make_order("__name__", "ASCENDING")] + self.assertEqual(query._normalize_orders(), expected) + + def test__normalize_orders_w_name_orders_w_snapshot_cursor(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query = ( + self._make_one(collection) + .order_by("__name__", "DESCENDING") + .start_at(snapshot) + ) + expected = [query._make_order("__name__", "DESCENDING")] + self.assertEqual(query._normalize_orders(), expected) + + def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_exists(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query = ( + self._make_one(collection) + .where("c", "<=", 20) + .order_by("c", "DESCENDING") + .start_at(snapshot) + ) + expected = [ + query._make_order("c", "DESCENDING"), + query._make_order("__name__", "DESCENDING"), + ] + self.assertEqual(query._normalize_orders(), expected) + + def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_where(self): + values = {"a": 7, "b": "foo"} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + collection = self._make_collection("here") + query = self._make_one(collection).where("c", "<=", 20).end_at(snapshot) + expected = [ + query._make_order("c", "ASCENDING"), + query._make_order("__name__", "ASCENDING"), + ] + self.assertEqual(query._normalize_orders(), expected) + + def test__normalize_cursor_none(self): + query = self._make_one(mock.sentinel.parent) + self.assertIsNone(query._normalize_cursor(None, query._orders)) + + def test__normalize_cursor_no_order(self): + cursor = ([1], True) + query = self._make_one(mock.sentinel.parent) + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_as_list_mismatched_order(self): + cursor = ([1, 2], True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_as_dict_mismatched_order(self): + cursor = ({"a": 1}, True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_w_delete(self): + from google.cloud.firestore_v1 import DELETE_FIELD + + cursor = ([DELETE_FIELD], True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_w_server_timestamp(self): + from google.cloud.firestore_v1 import SERVER_TIMESTAMP + + cursor = ([SERVER_TIMESTAMP], True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_w_array_remove(self): + from google.cloud.firestore_v1 import ArrayRemove + + cursor = ([ArrayRemove([1, 3, 5])], True) + query = 
self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_w_array_union(self): + from google.cloud.firestore_v1 import ArrayUnion + + cursor = ([ArrayUnion([2, 4, 8])], True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + with self.assertRaises(ValueError): + query._normalize_cursor(cursor, query._orders) + + def test__normalize_cursor_as_list_hit(self): + cursor = ([1], True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) + + def test__normalize_cursor_as_dict_hit(self): + cursor = ({"b": 1}, True) + query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") + + self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) + + def test__normalize_cursor_as_snapshot_hit(self): + values = {"b": 1} + docref = self._make_docref("here", "doc_id") + snapshot = self._make_snapshot(docref, values) + cursor = (snapshot, True) + collection = self._make_collection("here") + query = self._make_one(collection).order_by("b", "ASCENDING") + + self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) + + def test__normalize_cursor_w___name___w_reference(self): + db_string = "projects/my-project/database/(default)" + client = mock.Mock(spec=["_database_string"]) + client._database_string = db_string + parent = mock.Mock(spec=["_path", "_client"]) + parent._client = client + parent._path = ["C"] + query = self._make_one(parent).order_by("__name__", "ASCENDING") + docref = self._make_docref("here", "doc_id") + values = {"a": 7} + snapshot = self._make_snapshot(docref, values) + expected = docref + cursor = (snapshot, True) + + self.assertEqual( + query._normalize_cursor(cursor, query._orders), ([expected], True) + ) + + def test__normalize_cursor_w___name___wo_slash(self): + db_string = "projects/my-project/database/(default)" + client = mock.Mock(spec=["_database_string"]) + client._database_string = db_string + parent = mock.Mock(spec=["_path", "_client", "document"]) + parent._client = client + parent._path = ["C"] + document = parent.document.return_value = mock.Mock(spec=[]) + query = self._make_one(parent).order_by("__name__", "ASCENDING") + cursor = (["b"], True) + expected = document + + self.assertEqual( + query._normalize_cursor(cursor, query._orders), ([expected], True) + ) + parent.document.assert_called_once_with("b") + + def test__to_protobuf_all_fields(self): + from google.protobuf import wrappers_pb2 + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="cat", spec=["id"]) + query1 = self._make_one(parent) + query2 = query1.select(["X", "Y", "Z"]) + query3 = query2.where("Y", ">", 2.5) + query4 = query3.order_by("X") + query5 = query4.limit(17) + query6 = query5.offset(3) + query7 = query6.start_at({"X": 10}) + query8 = query7.end_at({"X": 25}) + + structured_query_pb = query8._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "select": query_pb2.StructuredQuery.Projection( + fields=[ + query_pb2.StructuredQuery.FieldReference(field_path=field_path) + for field_path in ["X", "Y", "Z"] + ] + ), + "where": query_pb2.StructuredQuery.Filter( + 
field_filter=query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="Y"), + op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, + value=document_pb2.Value(double_value=2.5), + ) + ), + "order_by": [ + _make_order_pb("X", enums.StructuredQuery.Direction.ASCENDING) + ], + "start_at": query_pb2.Cursor( + values=[document_pb2.Value(integer_value=10)], before=True + ), + "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=25)]), + "offset": 3, + "limit": wrappers_pb2.Int32Value(value=17), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_select_only(self): + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="cat", spec=["id"]) + query1 = self._make_one(parent) + field_paths = ["a.b", "a.c", "d"] + query2 = query1.select(field_paths) + + structured_query_pb = query2._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "select": query_pb2.StructuredQuery.Projection( + fields=[ + query_pb2.StructuredQuery.FieldReference(field_path=field_path) + for field_path in field_paths + ] + ), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_where_only(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="dog", spec=["id"]) + query1 = self._make_one(parent) + query2 = query1.where("a", "==", u"b") + + structured_query_pb = query2._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "where": query_pb2.StructuredQuery.Filter( + field_filter=query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="a"), + op=enums.StructuredQuery.FieldFilter.Operator.EQUAL, + value=document_pb2.Value(string_value=u"b"), + ) + ), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_order_by_only(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="fish", spec=["id"]) + query1 = self._make_one(parent) + query2 = query1.order_by("abc") + + structured_query_pb = query2._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "order_by": [ + _make_order_pb("abc", enums.StructuredQuery.Direction.ASCENDING) + ], + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_start_at_only(self): + # NOTE: "only" is wrong since we must have ``order_by`` as well. 
+ from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="phish", spec=["id"]) + query = self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}}) + + structured_query_pb = query._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "order_by": [ + _make_order_pb("X.Y", enums.StructuredQuery.Direction.ASCENDING) + ], + "start_at": query_pb2.Cursor( + values=[document_pb2.Value(string_value=u"Z")] + ), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_end_at_only(self): + # NOTE: "only" is wrong since we must have ``order_by`` as well. + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="ghoti", spec=["id"]) + query = self._make_one(parent).order_by("a").end_at({"a": 88}) + + structured_query_pb = query._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "order_by": [ + _make_order_pb("a", enums.StructuredQuery.Direction.ASCENDING) + ], + "end_at": query_pb2.Cursor(values=[document_pb2.Value(integer_value=88)]), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_offset_only(self): + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="cartt", spec=["id"]) + query1 = self._make_one(parent) + offset = 14 + query2 = query1.offset(offset) + + structured_query_pb = query2._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "offset": offset, + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + self.assertEqual(structured_query_pb, expected_pb) + + def test__to_protobuf_limit_only(self): + from google.protobuf import wrappers_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + parent = mock.Mock(id="donut", spec=["id"]) + query1 = self._make_one(parent) + limit = 31 + query2 = query1.limit(limit) + + structured_query_pb = query2._to_protobuf() + query_kwargs = { + "from": [ + query_pb2.StructuredQuery.CollectionSelector(collection_id=parent.id) + ], + "limit": wrappers_pb2.Int32Value(value=limit), + } + expected_pb = query_pb2.StructuredQuery(**query_kwargs) + + self.assertEqual(structured_query_pb, expected_pb) + + def test_get_simple(self): + import warnings + + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("dee") + + # Add a dummy response to the minimal fake GAPIC. + _, expected_prefix = parent._parent_info() + name = "{}/sleep".format(expected_prefix) + data = {"snooze": 10} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb]) + + # Execute the query and check the response. 
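Besides checking the returned snapshots, test_get_simple verifies that Query.get() now emits a DeprecationWarning (stream() is its replacement). The warning-capture idiom it relies on is plain standard library; a self-contained version with a made-up function:

import warnings

def legacy():
    warnings.warn("use stream() instead", DeprecationWarning)
    return iter([])

with warnings.catch_warnings(record=True) as warned:
    warnings.simplefilter("always")   # ensure the warning is not suppressed
    list(legacy())

assert len(warned) == 1
assert warned[0].category is DeprecationWarning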
+ query = self._make_one(parent) + + with warnings.catch_warnings(record=True) as warned: + get_response = query.get() + + self.assertIsInstance(get_response, types.GeneratorType) + returned = list(get_response) + self.assertEqual(len(returned), 1) + snapshot = returned[0] + self.assertEqual(snapshot.reference._path, ("dee", "sleep")) + self.assertEqual(snapshot.to_dict(), data) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + # Verify the deprecation + self.assertEqual(len(warned), 1) + self.assertIs(warned[0].category, DeprecationWarning) + + def test_stream_simple(self): + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("dee") + + # Add a dummy response to the minimal fake GAPIC. + _, expected_prefix = parent._parent_info() + name = "{}/sleep".format(expected_prefix) + data = {"snooze": 10} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb]) + + # Execute the query and check the response. + query = self._make_one(parent) + get_response = query.stream() + self.assertIsInstance(get_response, types.GeneratorType) + returned = list(get_response) + self.assertEqual(len(returned), 1) + snapshot = returned[0] + self.assertEqual(snapshot.reference._path, ("dee", "sleep")) + self.assertEqual(snapshot.to_dict(), data) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_stream_with_transaction(self): + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Create a real-ish transaction for this client. + transaction = client.transaction() + txn_id = b"\x00\x00\x01-work-\xf2" + transaction._id = txn_id + + # Make a **real** collection reference as parent. + parent = client.collection("declaration") + + # Add a dummy response to the minimal fake GAPIC. + parent_path, expected_prefix = parent._parent_info() + name = "{}/burger".format(expected_prefix) + data = {"lettuce": b"\xee\x87"} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb]) + + # Execute the query and check the response. + query = self._make_one(parent) + get_response = query.stream(transaction=transaction) + self.assertIsInstance(get_response, types.GeneratorType) + returned = list(get_response) + self.assertEqual(len(returned), 1) + snapshot = returned[0] + self.assertEqual(snapshot.reference._path, ("declaration", "burger")) + self.assertEqual(snapshot.to_dict(), data) + + # Verify the mock call. + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + def test_stream_no_results(self): + # Create a minimal fake GAPIC with a dummy response. 
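The same fixture pattern runs through all of these stream tests: a spec-limited Mock stands in for the GAPIC transport, its run_query returns an iterator of response protos, and it is attached to a real client via _firestore_api_internal. Stripped of the Firestore specifics, the core of the pattern looks like this (uses only the ``mock`` package):

import mock

fake_api = mock.Mock(spec=["run_query"])      # anything other than run_query raises AttributeError
fake_api.run_query.return_value = iter([])    # an empty response stream
assert list(fake_api.run_query("parent", None)) == []
fake_api.run_query.assert_called_once_with("parent", None)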
+ firestore_api = mock.Mock(spec=["run_query"]) + empty_response = _make_query_response() + run_query_response = iter([empty_response]) + firestore_api.run_query.return_value = run_query_response + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("dah", "dah", "dum") + query = self._make_one(parent) + + get_response = query.stream() + self.assertIsInstance(get_response, types.GeneratorType) + self.assertEqual(list(get_response), []) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_stream_second_response_in_empty_stream(self): + # Create a minimal fake GAPIC with a dummy response. + firestore_api = mock.Mock(spec=["run_query"]) + empty_response1 = _make_query_response() + empty_response2 = _make_query_response() + run_query_response = iter([empty_response1, empty_response2]) + firestore_api.run_query.return_value = run_query_response + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("dah", "dah", "dum") + query = self._make_one(parent) + + get_response = query.stream() + self.assertIsInstance(get_response, types.GeneratorType) + self.assertEqual(list(get_response), []) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_stream_with_skipped_results(self): + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("talk", "and", "chew-gum") + + # Add two dummy responses to the minimal fake GAPIC. + _, expected_prefix = parent._parent_info() + response_pb1 = _make_query_response(skipped_results=1) + name = "{}/clock".format(expected_prefix) + data = {"noon": 12, "nested": {"bird": 10.5}} + response_pb2 = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) + + # Execute the query and check the response. + query = self._make_one(parent) + get_response = query.stream() + self.assertIsInstance(get_response, types.GeneratorType) + returned = list(get_response) + self.assertEqual(len(returned), 1) + snapshot = returned[0] + self.assertEqual(snapshot.reference._path, ("talk", "and", "chew-gum", "clock")) + self.assertEqual(snapshot.to_dict(), data) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + def test_stream_empty_after_first_response(self): + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Make a **real** collection reference as parent. + parent = client.collection("charles") + + # Add two dummy responses to the minimal fake GAPIC. 
+ _, expected_prefix = parent._parent_info() + name = "{}/bark".format(expected_prefix) + data = {"lee": "hoop"} + response_pb1 = _make_query_response(name=name, data=data) + response_pb2 = _make_query_response() + firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) + + # Execute the query and check the response. + query = self._make_one(parent) + get_response = query.stream() + self.assertIsInstance(get_response, types.GeneratorType) + returned = list(get_response) + self.assertEqual(len(returned), 1) + snapshot = returned[0] + self.assertEqual(snapshot.reference._path, ("charles", "bark")) + self.assertEqual(snapshot.to_dict(), data) + + # Verify the mock call. + parent_path, _ = parent._parent_info() + firestore_api.run_query.assert_called_once_with( + parent_path, + query._to_protobuf(), + transaction=None, + metadata=client._rpc_metadata, + ) + + @mock.patch("google.cloud.firestore_v1.query.Watch", autospec=True) + def test_on_snapshot(self, watch): + query = self._make_one(mock.sentinel.parent) + query.on_snapshot(None) + watch.for_query.assert_called_once() + + def test_comparator_no_ordering(self): + query = self._make_one(mock.sentinel.parent) + query._orders = [] + doc1 = mock.Mock() + doc1.reference._path = ("col", "adocument1") + + doc2 = mock.Mock() + doc2.reference._path = ("col", "adocument2") + + sort = query._comparator(doc1, doc2) + self.assertEqual(sort, -1) + + def test_comparator_no_ordering_same_id(self): + query = self._make_one(mock.sentinel.parent) + query._orders = [] + doc1 = mock.Mock() + doc1.reference._path = ("col", "adocument1") + + doc2 = mock.Mock() + doc2.reference._path = ("col", "adocument1") + + sort = query._comparator(doc1, doc2) + self.assertEqual(sort, 0) + + def test_comparator_ordering(self): + query = self._make_one(mock.sentinel.parent) + orderByMock = mock.Mock() + orderByMock.field.field_path = "last" + orderByMock.direction = 1 # ascending + query._orders = [orderByMock] + + doc1 = mock.Mock() + doc1.reference._path = ("col", "adocument1") + doc1._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "secondlovelace"}, + } + doc2 = mock.Mock() + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } + + sort = query._comparator(doc1, doc2) + self.assertEqual(sort, 1) + + def test_comparator_ordering_descending(self): + query = self._make_one(mock.sentinel.parent) + orderByMock = mock.Mock() + orderByMock.field.field_path = "last" + orderByMock.direction = -1 # descending + query._orders = [orderByMock] + + doc1 = mock.Mock() + doc1.reference._path = ("col", "adocument1") + doc1._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "secondlovelace"}, + } + doc2 = mock.Mock() + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } + + sort = query._comparator(doc1, doc2) + self.assertEqual(sort, -1) + + def test_comparator_missing_order_by_field_in_data_raises(self): + query = self._make_one(mock.sentinel.parent) + orderByMock = mock.Mock() + orderByMock.field.field_path = "last" + orderByMock.direction = 1 # ascending + query._orders = [orderByMock] + + doc1 = mock.Mock() + doc1.reference._path = ("col", "adocument1") + doc1._data = {} + doc2 = mock.Mock() + doc2.reference._path = ("col", "adocument2") + doc2._data = { + "first": {"stringValue": "Ada"}, + "last": {"stringValue": "lovelace"}, + } + + with 
self.assertRaisesRegex(ValueError, "Can only compare fields "): + query._comparator(doc1, doc2) + + +class Test__enum_from_op_string(unittest.TestCase): + @staticmethod + def _call_fut(op_string): + from google.cloud.firestore_v1.query import _enum_from_op_string + + return _enum_from_op_string(op_string) + + def test_success(self): + from google.cloud.firestore_v1.gapic import enums + + op_class = enums.StructuredQuery.FieldFilter.Operator + self.assertEqual(self._call_fut("<"), op_class.LESS_THAN) + self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL) + self.assertEqual(self._call_fut("=="), op_class.EQUAL) + self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL) + self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN) + self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS) + + def test_failure(self): + with self.assertRaises(ValueError): + self._call_fut("?") + + +class Test__isnan(unittest.TestCase): + @staticmethod + def _call_fut(value): + from google.cloud.firestore_v1.query import _isnan + + return _isnan(value) + + def test_valid(self): + self.assertTrue(self._call_fut(float("nan"))) + + def test_invalid(self): + self.assertFalse(self._call_fut(51.5)) + self.assertFalse(self._call_fut(None)) + self.assertFalse(self._call_fut("str")) + self.assertFalse(self._call_fut(int)) + self.assertFalse(self._call_fut(1.0 + 1.0j)) + + +class Test__enum_from_direction(unittest.TestCase): + @staticmethod + def _call_fut(direction): + from google.cloud.firestore_v1.query import _enum_from_direction + + return _enum_from_direction(direction) + + def test_success(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.query import Query + + dir_class = enums.StructuredQuery.Direction + self.assertEqual(self._call_fut(Query.ASCENDING), dir_class.ASCENDING) + self.assertEqual(self._call_fut(Query.DESCENDING), dir_class.DESCENDING) + + # Ints pass through + self.assertEqual(self._call_fut(dir_class.ASCENDING), dir_class.ASCENDING) + self.assertEqual(self._call_fut(dir_class.DESCENDING), dir_class.DESCENDING) + + def test_failure(self): + with self.assertRaises(ValueError): + self._call_fut("neither-ASCENDING-nor-DESCENDING") + + +class Test__filter_pb(unittest.TestCase): + @staticmethod + def _call_fut(field_or_unary): + from google.cloud.firestore_v1.query import _filter_pb + + return _filter_pb(field_or_unary) + + def test_unary(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import query_pb2 + + unary_pb = query_pb2.StructuredQuery.UnaryFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="a.b.c"), + op=enums.StructuredQuery.UnaryFilter.Operator.IS_NULL, + ) + filter_pb = self._call_fut(unary_pb) + expected_pb = query_pb2.StructuredQuery.Filter(unary_filter=unary_pb) + self.assertEqual(filter_pb, expected_pb) + + def test_field(self): + from google.cloud.firestore_v1.gapic import enums + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import query_pb2 + + field_filter_pb = query_pb2.StructuredQuery.FieldFilter( + field=query_pb2.StructuredQuery.FieldReference(field_path="XYZ"), + op=enums.StructuredQuery.FieldFilter.Operator.GREATER_THAN, + value=document_pb2.Value(double_value=90.75), + ) + filter_pb = self._call_fut(field_filter_pb) + expected_pb = query_pb2.StructuredQuery.Filter(field_filter=field_filter_pb) + self.assertEqual(filter_pb, expected_pb) + + def test_bad_type(self): + with 
self.assertRaises(ValueError): + self._call_fut(None) + + +class Test__cursor_pb(unittest.TestCase): + @staticmethod + def _call_fut(cursor_pair): + from google.cloud.firestore_v1.query import _cursor_pb + + return _cursor_pb(cursor_pair) + + def test_no_pair(self): + self.assertIsNone(self._call_fut(None)) + + def test_success(self): + from google.cloud.firestore_v1.proto import query_pb2 + from google.cloud.firestore_v1 import _helpers + + data = [1.5, 10, True] + cursor_pair = data, True + + cursor_pb = self._call_fut(cursor_pair) + + expected_pb = query_pb2.Cursor( + values=[_helpers.encode_value(value) for value in data], before=True + ) + self.assertEqual(cursor_pb, expected_pb) + + +class Test__query_response_to_snapshot(unittest.TestCase): + @staticmethod + def _call_fut(response_pb, collection, expected_prefix): + from google.cloud.firestore_v1.query import _query_response_to_snapshot + + return _query_response_to_snapshot(response_pb, collection, expected_prefix) + + def test_empty(self): + response_pb = _make_query_response() + snapshot = self._call_fut(response_pb, None, None) + self.assertIsNone(snapshot) + + def test_after_offset(self): + skipped_results = 410 + response_pb = _make_query_response(skipped_results=skipped_results) + snapshot = self._call_fut(response_pb, None, None) + self.assertIsNone(snapshot) + + def test_response(self): + from google.cloud.firestore_v1.document import DocumentSnapshot + + client = _make_client() + collection = client.collection("a", "b", "c") + _, expected_prefix = collection._parent_info() + + # Create name for the protobuf. + doc_id = "gigantic" + name = "{}/{}".format(expected_prefix, doc_id) + data = {"a": 901, "b": True} + response_pb = _make_query_response(name=name, data=data) + + snapshot = self._call_fut(response_pb, collection, expected_prefix) + self.assertIsInstance(snapshot, DocumentSnapshot) + expected_path = collection._path + (doc_id,) + self.assertEqual(snapshot.reference._path, expected_path) + self.assertEqual(snapshot.to_dict(), data) + self.assertTrue(snapshot.exists) + self.assertEqual(snapshot.read_time, response_pb.read_time) + self.assertEqual(snapshot.create_time, response_pb.document.create_time) + self.assertEqual(snapshot.update_time, response_pb.document.update_time) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(project="project-project"): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project=project, credentials=credentials) + + +def _make_order_pb(field_path, direction): + from google.cloud.firestore_v1.proto import query_pb2 + + return query_pb2.StructuredQuery.Order( + field=query_pb2.StructuredQuery.FieldReference(field_path=field_path), + direction=direction, + ) + + +def _make_query_response(**kwargs): + # kwargs supported are ``skipped_results``, ``name`` and ``data`` + from google.cloud.firestore_v1.proto import document_pb2 + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.firestore_v1 import _helpers + + now = datetime.datetime.utcnow() + read_time = _datetime_to_pb_timestamp(now) + kwargs["read_time"] = read_time + + name = kwargs.pop("name", None) + data = kwargs.pop("data", None) + if name is not None and data is not None: + document_pb = document_pb2.Document( + name=name, fields=_helpers.encode_dict(data) + ) + delta = 
datetime.timedelta(seconds=100) + update_time = _datetime_to_pb_timestamp(now - delta) + create_time = _datetime_to_pb_timestamp(now - 2 * delta) + document_pb.update_time.CopyFrom(update_time) + document_pb.create_time.CopyFrom(create_time) + + kwargs["document"] = document_pb + + return firestore_pb2.RunQueryResponse(**kwargs) diff --git a/firestore/tests/unit/v1/test_transaction.py b/firestore/tests/unit/v1/test_transaction.py new file mode 100644 index 000000000000..ed578ad3eea6 --- /dev/null +++ b/firestore/tests/unit/v1/test_transaction.py @@ -0,0 +1,985 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + + +class TestTransaction(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.transaction import Transaction + + return Transaction + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor_defaults(self): + from google.cloud.firestore_v1.transaction import MAX_ATTEMPTS + + transaction = self._make_one(mock.sentinel.client) + self.assertIs(transaction._client, mock.sentinel.client) + self.assertEqual(transaction._write_pbs, []) + self.assertEqual(transaction._max_attempts, MAX_ATTEMPTS) + self.assertFalse(transaction._read_only) + self.assertIsNone(transaction._id) + + def test_constructor_explicit(self): + transaction = self._make_one( + mock.sentinel.client, max_attempts=10, read_only=True + ) + self.assertIs(transaction._client, mock.sentinel.client) + self.assertEqual(transaction._write_pbs, []) + self.assertEqual(transaction._max_attempts, 10) + self.assertTrue(transaction._read_only) + self.assertIsNone(transaction._id) + + def test__add_write_pbs_failure(self): + from google.cloud.firestore_v1.transaction import _WRITE_READ_ONLY + + batch = self._make_one(mock.sentinel.client, read_only=True) + self.assertEqual(batch._write_pbs, []) + with self.assertRaises(ValueError) as exc_info: + batch._add_write_pbs([mock.sentinel.write]) + + self.assertEqual(exc_info.exception.args, (_WRITE_READ_ONLY,)) + self.assertEqual(batch._write_pbs, []) + + def test__add_write_pbs(self): + batch = self._make_one(mock.sentinel.client) + self.assertEqual(batch._write_pbs, []) + batch._add_write_pbs([mock.sentinel.write]) + self.assertEqual(batch._write_pbs, [mock.sentinel.write]) + + def test__options_protobuf_read_only(self): + from google.cloud.firestore_v1.proto import common_pb2 + + transaction = self._make_one(mock.sentinel.client, read_only=True) + options_pb = transaction._options_protobuf(None) + expected_pb = common_pb2.TransactionOptions( + read_only=common_pb2.TransactionOptions.ReadOnly() + ) + self.assertEqual(options_pb, expected_pb) + + def test__options_protobuf_read_only_retry(self): + from google.cloud.firestore_v1.transaction import _CANT_RETRY_READ_ONLY + + transaction = self._make_one(mock.sentinel.client, read_only=True) + retry_id = b"illuminate" + + with 
self.assertRaises(ValueError) as exc_info: + transaction._options_protobuf(retry_id) + + self.assertEqual(exc_info.exception.args, (_CANT_RETRY_READ_ONLY,)) + + def test__options_protobuf_read_write(self): + transaction = self._make_one(mock.sentinel.client) + options_pb = transaction._options_protobuf(None) + self.assertIsNone(options_pb) + + def test__options_protobuf_on_retry(self): + from google.cloud.firestore_v1.proto import common_pb2 + + transaction = self._make_one(mock.sentinel.client) + retry_id = b"hocus-pocus" + options_pb = transaction._options_protobuf(retry_id) + expected_pb = common_pb2.TransactionOptions( + read_write=common_pb2.TransactionOptions.ReadWrite( + retry_transaction=retry_id + ) + ) + self.assertEqual(options_pb, expected_pb) + + def test_in_progress_property(self): + transaction = self._make_one(mock.sentinel.client) + self.assertFalse(transaction.in_progress) + transaction._id = b"not-none-bites" + self.assertTrue(transaction.in_progress) + + def test_id_property(self): + transaction = self._make_one(mock.sentinel.client) + transaction._id = mock.sentinel.eye_dee + self.assertIs(transaction.id, mock.sentinel.eye_dee) + + def test__begin(self): + from google.cloud.firestore_v1.gapic import firestore_client + from google.cloud.firestore_v1.proto import firestore_pb2 + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + txn_id = b"to-begin" + response = firestore_pb2.BeginTransactionResponse(transaction=txn_id) + firestore_api.begin_transaction.return_value = response + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a transaction and ``begin()`` it. + transaction = self._make_one(client) + self.assertIsNone(transaction._id) + + ret_val = transaction._begin() + self.assertIsNone(ret_val) + self.assertEqual(transaction._id, txn_id) + + # Verify the called mock. + firestore_api.begin_transaction.assert_called_once_with( + client._database_string, options_=None, metadata=client._rpc_metadata + ) + + def test__begin_failure(self): + from google.cloud.firestore_v1.transaction import _CANT_BEGIN + + client = _make_client() + transaction = self._make_one(client) + transaction._id = b"not-none" + + with self.assertRaises(ValueError) as exc_info: + transaction._begin() + + err_msg = _CANT_BEGIN.format(transaction._id) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + def test__clean_up(self): + transaction = self._make_one(mock.sentinel.client) + transaction._write_pbs.extend( + [mock.sentinel.write_pb1, mock.sentinel.write_pb2] + ) + transaction._id = b"not-this-time-my-friend" + + ret_val = transaction._clean_up() + self.assertIsNone(ret_val) + + self.assertEqual(transaction._write_pbs, []) + self.assertIsNone(transaction._id) + + def test__rollback(self): + from google.protobuf import empty_pb2 + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + firestore_api.rollback.return_value = empty_pb2.Empty() + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a transaction and roll it back. 
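+ # NOTE: ``_rollback`` requires ``_id`` to be set, forwards that ID to the
+ # GAPIC ``rollback`` RPC, and then clears local state via ``_clean_up``,
+ # even when the RPC raises (see ``test__rollback_failure`` below).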
+ transaction = self._make_one(client) + txn_id = b"to-be-r\x00lled" + transaction._id = txn_id + ret_val = transaction._rollback() + self.assertIsNone(ret_val) + self.assertIsNone(transaction._id) + + # Verify the called mock. + firestore_api.rollback.assert_called_once_with( + client._database_string, txn_id, metadata=client._rpc_metadata + ) + + def test__rollback_not_allowed(self): + from google.cloud.firestore_v1.transaction import _CANT_ROLLBACK + + client = _make_client() + transaction = self._make_one(client) + self.assertIsNone(transaction._id) + + with self.assertRaises(ValueError) as exc_info: + transaction._rollback() + + self.assertEqual(exc_info.exception.args, (_CANT_ROLLBACK,)) + + def test__rollback_failure(self): + from google.api_core import exceptions + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy failure. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + exc = exceptions.InternalServerError("Fire during rollback.") + firestore_api.rollback.side_effect = exc + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a transaction and roll it back. + transaction = self._make_one(client) + txn_id = b"roll-bad-server" + transaction._id = txn_id + + with self.assertRaises(exceptions.InternalServerError) as exc_info: + transaction._rollback() + + self.assertIs(exc_info.exception, exc) + self.assertIsNone(transaction._id) + self.assertEqual(transaction._write_pbs, []) + + # Verify the called mock. + firestore_api.rollback.assert_called_once_with( + client._database_string, txn_id, metadata=client._rpc_metadata + ) + + def test__commit(self): + from google.cloud.firestore_v1.gapic import firestore_client + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + commit_response = firestore_pb2.CommitResponse( + write_results=[write_pb2.WriteResult()] + ) + firestore_api.commit.return_value = commit_response + + # Attach the fake GAPIC to a real client. + client = _make_client("phone-joe") + client._firestore_api_internal = firestore_api + + # Actually make a transaction with some mutations and call _commit(). + transaction = self._make_one(client) + txn_id = b"under-over-thru-woods" + transaction._id = txn_id + document = client.document("zap", "galaxy", "ship", "space") + transaction.set(document, {"apple": 4.5}) + write_pbs = transaction._write_pbs[::] + + write_results = transaction._commit() + self.assertEqual(write_results, list(commit_response.write_results)) + # Make sure transaction has no more "changes". + self.assertIsNone(transaction._id) + self.assertEqual(transaction._write_pbs, []) + + # Verify the mocks. 
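+ # NOTE: the ``write_pbs`` captured before committing are exactly what the
+ # ``Commit`` RPC receives, and a successful ``_commit`` also clears the
+ # staged writes and the transaction ID, as asserted above.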
+ firestore_api.commit.assert_called_once_with( + client._database_string, + write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + def test__commit_not_allowed(self): + from google.cloud.firestore_v1.transaction import _CANT_COMMIT + + transaction = self._make_one(mock.sentinel.client) + self.assertIsNone(transaction._id) + with self.assertRaises(ValueError) as exc_info: + transaction._commit() + + self.assertEqual(exc_info.exception.args, (_CANT_COMMIT,)) + + def test__commit_failure(self): + from google.api_core import exceptions + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy failure. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + exc = exceptions.InternalServerError("Fire during commit.") + firestore_api.commit.side_effect = exc + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + # Actually make a transaction with some mutations and call _commit(). + transaction = self._make_one(client) + txn_id = b"beep-fail-commit" + transaction._id = txn_id + transaction.create(client.document("up", "down"), {"water": 1.0}) + transaction.delete(client.document("up", "left")) + write_pbs = transaction._write_pbs[::] + + with self.assertRaises(exceptions.InternalServerError) as exc_info: + transaction._commit() + + self.assertIs(exc_info.exception, exc) + self.assertEqual(transaction._id, txn_id) + self.assertEqual(transaction._write_pbs, write_pbs) + + # Verify the called mock. + firestore_api.commit.assert_called_once_with( + client._database_string, + write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + +class Test_Transactional(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.transaction import _Transactional + + return _Transactional + + def _make_one(self, *args, **kwargs): + klass = self._get_target_class() + return klass(*args, **kwargs) + + def test_constructor(self): + wrapped = self._make_one(mock.sentinel.callable_) + self.assertIs(wrapped.to_wrap, mock.sentinel.callable_) + self.assertIsNone(wrapped.current_id) + self.assertIsNone(wrapped.retry_id) + + def test__reset(self): + wrapped = self._make_one(mock.sentinel.callable_) + wrapped.current_id = b"not-none" + wrapped.retry_id = b"also-not" + + ret_val = wrapped._reset() + self.assertIsNone(ret_val) + + self.assertIsNone(wrapped.current_id) + self.assertIsNone(wrapped.retry_id) + + def test__pre_commit_success(self): + to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"totes-began" + transaction = _make_transaction(txn_id) + result = wrapped._pre_commit(transaction, "pos", key="word") + self.assertIs(result, mock.sentinel.result) + + self.assertEqual(transaction._id, txn_id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. 
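+ # NOTE: ``_pre_commit`` begins the transaction, records the current/retry
+ # IDs, and invokes the wrapped callable; commit and rollback are left to
+ # later stages.  In application code the wrapper is normally produced by
+ # the ``transactional`` decorator, roughly like this sketch (the function
+ # and ``ref`` are illustrative names, not part of this test):
+ #
+ #     @firestore.transactional
+ #     def update_count(transaction, ref):
+ #         snapshot = ref.get(transaction=transaction)
+ #         transaction.update(ref, {"count": snapshot.get("count") + 1})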
+ to_wrap.assert_called_once_with(transaction, "pos", key="word") + firestore_api = transaction._client._firestore_api + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_not_called() + + def test__pre_commit_retry_id_already_set_success(self): + from google.cloud.firestore_v1.proto import common_pb2 + + to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) + wrapped = self._make_one(to_wrap) + txn_id1 = b"already-set" + wrapped.retry_id = txn_id1 + + txn_id2 = b"ok-here-too" + transaction = _make_transaction(txn_id2) + result = wrapped._pre_commit(transaction) + self.assertIs(result, mock.sentinel.result) + + self.assertEqual(transaction._id, txn_id2) + self.assertEqual(wrapped.current_id, txn_id2) + self.assertEqual(wrapped.retry_id, txn_id1) + + # Verify mocks. + to_wrap.assert_called_once_with(transaction) + firestore_api = transaction._client._firestore_api + options_ = common_pb2.TransactionOptions( + read_write=common_pb2.TransactionOptions.ReadWrite( + retry_transaction=txn_id1 + ) + ) + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=options_, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_not_called() + + def test__pre_commit_failure(self): + exc = RuntimeError("Nope not today.") + to_wrap = mock.Mock(side_effect=exc, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"gotta-fail" + transaction = _make_transaction(txn_id) + with self.assertRaises(RuntimeError) as exc_info: + wrapped._pre_commit(transaction, 10, 20) + self.assertIs(exc_info.exception, exc) + + self.assertIsNone(transaction._id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. + to_wrap.assert_called_once_with(transaction, 10, 20) + firestore_api = transaction._client._firestore_api + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_called_once_with( + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.commit.assert_not_called() + + def test__pre_commit_failure_with_rollback_failure(self): + from google.api_core import exceptions + + exc1 = ValueError("I will not be only failure.") + to_wrap = mock.Mock(side_effect=exc1, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"both-will-fail" + transaction = _make_transaction(txn_id) + # Actually force the ``rollback`` to fail as well. + exc2 = exceptions.InternalServerError("Rollback blues.") + firestore_api = transaction._client._firestore_api + firestore_api.rollback.side_effect = exc2 + + # Try to ``_pre_commit`` + with self.assertRaises(exceptions.InternalServerError) as exc_info: + wrapped._pre_commit(transaction, a="b", c="zebra") + self.assertIs(exc_info.exception, exc2) + + self.assertIsNone(transaction._id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. 
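+ # NOTE: when both the wrapped callable and the rollback fail, the rollback
+ # error is the one re-raised to the caller, and ``begin_transaction`` plus
+ # the attempted ``rollback`` remain the only RPCs issued; ``commit`` is
+ # never reached.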
+ to_wrap.assert_called_once_with(transaction, a="b", c="zebra") + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_called_once_with( + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.commit.assert_not_called() + + def test__maybe_commit_success(self): + wrapped = self._make_one(mock.sentinel.callable_) + + txn_id = b"nyet" + transaction = _make_transaction(txn_id) + transaction._id = txn_id # We won't call ``begin()``. + succeeded = wrapped._maybe_commit(transaction) + self.assertTrue(succeeded) + + # On success, _id is reset. + self.assertIsNone(transaction._id) + + # Verify mocks. + firestore_api = transaction._client._firestore_api + firestore_api.begin_transaction.assert_not_called() + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + def test__maybe_commit_failure_read_only(self): + from google.api_core import exceptions + + wrapped = self._make_one(mock.sentinel.callable_) + + txn_id = b"failed" + transaction = _make_transaction(txn_id, read_only=True) + transaction._id = txn_id # We won't call ``begin()``. + wrapped.current_id = txn_id # We won't call ``_pre_commit()``. + wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. + + # Actually force the ``commit`` to fail (use ABORTED, but cannot + # retry since read-only). + exc = exceptions.Aborted("Read-only did a bad.") + firestore_api = transaction._client._firestore_api + firestore_api.commit.side_effect = exc + + with self.assertRaises(exceptions.Aborted) as exc_info: + wrapped._maybe_commit(transaction) + self.assertIs(exc_info.exception, exc) + + self.assertEqual(transaction._id, txn_id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. + firestore_api.begin_transaction.assert_not_called() + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + def test__maybe_commit_failure_can_retry(self): + from google.api_core import exceptions + + wrapped = self._make_one(mock.sentinel.callable_) + + txn_id = b"failed-but-retry" + transaction = _make_transaction(txn_id) + transaction._id = txn_id # We won't call ``begin()``. + wrapped.current_id = txn_id # We won't call ``_pre_commit()``. + wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. + + # Actually force the ``commit`` to fail. + exc = exceptions.Aborted("Read-write did a bad.") + firestore_api = transaction._client._firestore_api + firestore_api.commit.side_effect = exc + + succeeded = wrapped._maybe_commit(transaction) + self.assertFalse(succeeded) + + self.assertEqual(transaction._id, txn_id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. 
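+ # NOTE: ``_maybe_commit`` swallows ``Aborted`` for read-write transactions
+ # and signals failure by returning ``False`` so the caller can retry;
+ # read-only transactions and non-``Aborted`` errors re-raise instead, as
+ # the surrounding tests show.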
+ firestore_api.begin_transaction.assert_not_called() + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + def test__maybe_commit_failure_cannot_retry(self): + from google.api_core import exceptions + + wrapped = self._make_one(mock.sentinel.callable_) + + txn_id = b"failed-but-not-retryable" + transaction = _make_transaction(txn_id) + transaction._id = txn_id # We won't call ``begin()``. + wrapped.current_id = txn_id # We won't call ``_pre_commit()``. + wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. + + # Actually force the ``commit`` to fail. + exc = exceptions.InternalServerError("Real bad thing") + firestore_api = transaction._client._firestore_api + firestore_api.commit.side_effect = exc + + with self.assertRaises(exceptions.InternalServerError) as exc_info: + wrapped._maybe_commit(transaction) + self.assertIs(exc_info.exception, exc) + + self.assertEqual(transaction._id, txn_id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. + firestore_api.begin_transaction.assert_not_called() + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + def test___call__success_first_attempt(self): + to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"whole-enchilada" + transaction = _make_transaction(txn_id) + result = wrapped(transaction, "a", b="c") + self.assertIs(result, mock.sentinel.result) + + self.assertIsNone(transaction._id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. + to_wrap.assert_called_once_with(transaction, "a", b="c") + firestore_api = transaction._client._firestore_api + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_not_called() + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + def test___call__success_second_attempt(self): + from google.api_core import exceptions + from google.cloud.firestore_v1.proto import common_pb2 + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + + to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"whole-enchilada" + transaction = _make_transaction(txn_id) + + # Actually force the ``commit`` to fail on first / succeed on second. + exc = exceptions.Aborted("Contention junction.") + firestore_api = transaction._client._firestore_api + firestore_api.commit.side_effect = [ + exc, + firestore_pb2.CommitResponse(write_results=[write_pb2.WriteResult()]), + ] + + # Call the __call__-able ``wrapped``. + result = wrapped(transaction, "a", b="c") + self.assertIs(result, mock.sentinel.result) + + self.assertIsNone(transaction._id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. 
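+ # NOTE: on the retry, the second ``BeginTransaction`` call carries
+ # ``retry_transaction`` set to the aborted attempt's ID, which is exactly
+ # what the ``mock_calls`` comparison below checks.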
+ wrapped_call = mock.call(transaction, "a", b="c") + self.assertEqual(to_wrap.mock_calls, [wrapped_call, wrapped_call]) + firestore_api = transaction._client._firestore_api + db_str = transaction._client._database_string + options_ = common_pb2.TransactionOptions( + read_write=common_pb2.TransactionOptions.ReadWrite(retry_transaction=txn_id) + ) + self.assertEqual( + firestore_api.begin_transaction.mock_calls, + [ + mock.call( + db_str, options_=None, metadata=transaction._client._rpc_metadata + ), + mock.call( + db_str, + options_=options_, + metadata=transaction._client._rpc_metadata, + ), + ], + ) + firestore_api.rollback.assert_not_called() + commit_call = mock.call( + db_str, [], transaction=txn_id, metadata=transaction._client._rpc_metadata + ) + self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) + + def test___call__failure(self): + from google.api_core import exceptions + from google.cloud.firestore_v1.transaction import _EXCEED_ATTEMPTS_TEMPLATE + + to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) + wrapped = self._make_one(to_wrap) + + txn_id = b"only-one-shot" + transaction = _make_transaction(txn_id, max_attempts=1) + + # Actually force the ``commit`` to fail. + exc = exceptions.Aborted("Contention just once.") + firestore_api = transaction._client._firestore_api + firestore_api.commit.side_effect = exc + + # Call the __call__-able ``wrapped``. + with self.assertRaises(ValueError) as exc_info: + wrapped(transaction, "here", there=1.5) + + err_msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) + self.assertEqual(exc_info.exception.args, (err_msg,)) + + self.assertIsNone(transaction._id) + self.assertEqual(wrapped.current_id, txn_id) + self.assertEqual(wrapped.retry_id, txn_id) + + # Verify mocks. + to_wrap.assert_called_once_with(transaction, "here", there=1.5) + firestore_api.begin_transaction.assert_called_once_with( + transaction._client._database_string, + options_=None, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.rollback.assert_called_once_with( + transaction._client._database_string, + txn_id, + metadata=transaction._client._rpc_metadata, + ) + firestore_api.commit.assert_called_once_with( + transaction._client._database_string, + [], + transaction=txn_id, + metadata=transaction._client._rpc_metadata, + ) + + +class Test_transactional(unittest.TestCase): + @staticmethod + def _call_fut(to_wrap): + from google.cloud.firestore_v1.transaction import transactional + + return transactional(to_wrap) + + def test_it(self): + from google.cloud.firestore_v1.transaction import _Transactional + + wrapped = self._call_fut(mock.sentinel.callable_) + self.assertIsInstance(wrapped, _Transactional) + self.assertIs(wrapped.to_wrap, mock.sentinel.callable_) + + +class Test__commit_with_retry(unittest.TestCase): + @staticmethod + def _call_fut(client, write_pbs, transaction_id): + from google.cloud.firestore_v1.transaction import _commit_with_retry + + return _commit_with_retry(client, write_pbs, transaction_id) + + @mock.patch("google.cloud.firestore_v1.transaction._sleep") + def test_success_first_attempt(self, _sleep): + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + + # Attach the fake GAPIC to a real client. + client = _make_client("summer") + client._firestore_api_internal = firestore_api + + # Call function and check result. 
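+ # NOTE: ``_commit_with_retry`` only sleeps between attempts, so a
+ # first-try success never touches ``_sleep``.  The backoff it implements
+ # is roughly the following sketch (names and argument elision are
+ # illustrative, not the verbatim implementation):
+ #
+ #     current_sleep = 1.0
+ #     while True:
+ #         try:
+ #             return client._firestore_api.commit(...)
+ #         except exceptions.ServiceUnavailable:
+ #             current_sleep = _sleep(current_sleep)  # jittered, doubling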
+ txn_id = b"cheeeeeez" + commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) + self.assertIs(commit_response, firestore_api.commit.return_value) + + # Verify mocks used. + _sleep.assert_not_called() + firestore_api.commit.assert_called_once_with( + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + @mock.patch("google.cloud.firestore_v1.transaction._sleep", side_effect=[2.0, 4.0]) + def test_success_third_attempt(self, _sleep): + from google.api_core import exceptions + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + # Make sure the first two requests fail and the third succeeds. + firestore_api.commit.side_effect = [ + exceptions.ServiceUnavailable("Server sleepy."), + exceptions.ServiceUnavailable("Server groggy."), + mock.sentinel.commit_response, + ] + + # Attach the fake GAPIC to a real client. + client = _make_client("outside") + client._firestore_api_internal = firestore_api + + # Call function and check result. + txn_id = b"the-world\x00" + commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) + self.assertIs(commit_response, mock.sentinel.commit_response) + + # Verify mocks used. + self.assertEqual(_sleep.call_count, 2) + _sleep.assert_any_call(1.0) + _sleep.assert_any_call(2.0) + # commit() called same way 3 times. + commit_call = mock.call( + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + self.assertEqual( + firestore_api.commit.mock_calls, [commit_call, commit_call, commit_call] + ) + + @mock.patch("google.cloud.firestore_v1.transaction._sleep") + def test_failure_first_attempt(self, _sleep): + from google.api_core import exceptions + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + # Make sure the first request fails with an un-retryable error. + exc = exceptions.ResourceExhausted("We ran out of fries.") + firestore_api.commit.side_effect = exc + + # Attach the fake GAPIC to a real client. + client = _make_client("peanut-butter") + client._firestore_api_internal = firestore_api + + # Call function and check result. + txn_id = b"\x08\x06\x07\x05\x03\x00\x09-jenny" + with self.assertRaises(exceptions.ResourceExhausted) as exc_info: + self._call_fut(client, mock.sentinel.write_pbs, txn_id) + + self.assertIs(exc_info.exception, exc) + + # Verify mocks used. + _sleep.assert_not_called() + firestore_api.commit.assert_called_once_with( + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + + @mock.patch("google.cloud.firestore_v1.transaction._sleep", return_value=2.0) + def test_failure_second_attempt(self, _sleep): + from google.api_core import exceptions + from google.cloud.firestore_v1.gapic import firestore_client + + # Create a minimal fake GAPIC with a dummy result. + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + # Make sure the first request fails retry-able and second + # fails non-retryable. 
+ exc1 = exceptions.ServiceUnavailable("Come back next time.") + exc2 = exceptions.InternalServerError("Server on fritz.") + firestore_api.commit.side_effect = [exc1, exc2] + + # Attach the fake GAPIC to a real client. + client = _make_client("peanut-butter") + client._firestore_api_internal = firestore_api + + # Call function and check result. + txn_id = b"the-journey-when-and-where-well-go" + with self.assertRaises(exceptions.InternalServerError) as exc_info: + self._call_fut(client, mock.sentinel.write_pbs, txn_id) + + self.assertIs(exc_info.exception, exc2) + + # Verify mocks used. + _sleep.assert_called_once_with(1.0) + # commit() called same way 2 times. + commit_call = mock.call( + client._database_string, + mock.sentinel.write_pbs, + transaction=txn_id, + metadata=client._rpc_metadata, + ) + self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) + + +class Test__sleep(unittest.TestCase): + @staticmethod + def _call_fut(current_sleep, **kwargs): + from google.cloud.firestore_v1.transaction import _sleep + + return _sleep(current_sleep, **kwargs) + + @mock.patch("random.uniform", return_value=5.5) + @mock.patch("time.sleep", return_value=None) + def test_defaults(self, sleep, uniform): + curr_sleep = 10.0 + self.assertLessEqual(uniform.return_value, curr_sleep) + + new_sleep = self._call_fut(curr_sleep) + self.assertEqual(new_sleep, 2.0 * curr_sleep) + + uniform.assert_called_once_with(0.0, curr_sleep) + sleep.assert_called_once_with(uniform.return_value) + + @mock.patch("random.uniform", return_value=10.5) + @mock.patch("time.sleep", return_value=None) + def test_explicit(self, sleep, uniform): + curr_sleep = 12.25 + self.assertLessEqual(uniform.return_value, curr_sleep) + + multiplier = 1.5 + new_sleep = self._call_fut(curr_sleep, max_sleep=100.0, multiplier=multiplier) + self.assertEqual(new_sleep, multiplier * curr_sleep) + + uniform.assert_called_once_with(0.0, curr_sleep) + sleep.assert_called_once_with(uniform.return_value) + + @mock.patch("random.uniform", return_value=6.75) + @mock.patch("time.sleep", return_value=None) + def test_exceeds_max(self, sleep, uniform): + curr_sleep = 20.0 + self.assertLessEqual(uniform.return_value, curr_sleep) + + max_sleep = 38.5 + new_sleep = self._call_fut(curr_sleep, max_sleep=max_sleep, multiplier=2.0) + self.assertEqual(new_sleep, max_sleep) + + uniform.assert_called_once_with(0.0, curr_sleep) + sleep.assert_called_once_with(uniform.return_value) + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +def _make_client(project="feral-tom-cat"): + from google.cloud.firestore_v1.client import Client + + credentials = _make_credentials() + return Client(project=project, credentials=credentials) + + +def _make_transaction(txn_id, **txn_kwargs): + from google.protobuf import empty_pb2 + from google.cloud.firestore_v1.gapic import firestore_client + from google.cloud.firestore_v1.proto import firestore_pb2 + from google.cloud.firestore_v1.proto import write_pb2 + from google.cloud.firestore_v1.transaction import Transaction + + # Create a fake GAPIC ... + firestore_api = mock.create_autospec( + firestore_client.FirestoreClient, instance=True + ) + # ... with a dummy ``BeginTransactionResponse`` result ... + begin_response = firestore_pb2.BeginTransactionResponse(transaction=txn_id) + firestore_api.begin_transaction.return_value = begin_response + # ... and a dummy ``Rollback`` result ... 
+ firestore_api.rollback.return_value = empty_pb2.Empty() + # ... and a dummy ``Commit`` result. + commit_response = firestore_pb2.CommitResponse( + write_results=[write_pb2.WriteResult()] + ) + firestore_api.commit.return_value = commit_response + + # Attach the fake GAPIC to a real client. + client = _make_client() + client._firestore_api_internal = firestore_api + + return Transaction(client, **txn_kwargs) diff --git a/firestore/tests/unit/v1/test_transforms.py b/firestore/tests/unit/v1/test_transforms.py new file mode 100644 index 000000000000..7f0cdc4c86f6 --- /dev/null +++ b/firestore/tests/unit/v1/test_transforms.py @@ -0,0 +1,65 @@ +# Copyright 2017 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_ValueList(unittest.TestCase): + @staticmethod + def _get_target_class(): + from google.cloud.firestore_v1.transforms import _ValueList + + return _ValueList + + def _make_one(self, values): + return self._get_target_class()(values) + + def test_ctor_w_non_list_non_tuple(self): + invalid_values = (None, u"phred", b"DEADBEEF", 123, {}, object()) + for invalid_value in invalid_values: + with self.assertRaises(ValueError): + self._make_one(invalid_value) + + def test_ctor_w_empty(self): + with self.assertRaises(ValueError): + self._make_one([]) + + def test_ctor_w_non_empty_list(self): + values = ["phred", "bharney"] + inst = self._make_one(values) + self.assertEqual(inst.values, values) + + def test_ctor_w_non_empty_tuple(self): + values = ("phred", "bharney") + inst = self._make_one(values) + self.assertEqual(inst.values, list(values)) + + def test___eq___other_type(self): + values = ("phred", "bharney") + inst = self._make_one(values) + other = object() + self.assertFalse(inst == other) + + def test___eq___different_values(self): + values = ("phred", "bharney") + other_values = ("wylma", "bhetty") + inst = self._make_one(values) + other = self._make_one(other_values) + self.assertFalse(inst == other) + + def test___eq___same_values(self): + values = ("phred", "bharney") + inst = self._make_one(values) + other = self._make_one(values) + self.assertTrue(inst == other) diff --git a/firestore/tests/unit/v1/test_watch.py b/firestore/tests/unit/v1/test_watch.py new file mode 100644 index 000000000000..be22809802da --- /dev/null +++ b/firestore/tests/unit/v1/test_watch.py @@ -0,0 +1,830 @@ +import datetime +import unittest +import mock +from google.cloud.firestore_v1.proto import firestore_pb2 + + +class TestWatchDocTree(unittest.TestCase): + def _makeOne(self): + from google.cloud.firestore_v1.watch import WatchDocTree + + return WatchDocTree() + + def test_insert_and_keys(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + self.assertEqual(sorted(inst.keys()), ["a", "b"]) + + def test_remove_and_keys(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + inst = inst.remove("a") + self.assertEqual(sorted(inst.keys()), ["b"]) + + def 
test_insert_and_find(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + val = inst.find("a") + self.assertEqual(val.value, 2) + + def test___len__(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + self.assertEqual(len(inst), 2) + + def test___iter__(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + inst = inst.insert("a", 2) + self.assertEqual(sorted(list(inst)), ["a", "b"]) + + def test___contains__(self): + inst = self._makeOne() + inst = inst.insert("b", 1) + self.assertTrue("b" in inst) + self.assertFalse("a" in inst) + + +class TestDocumentChange(unittest.TestCase): + def _makeOne(self, type, document, old_index, new_index): + from google.cloud.firestore_v1.watch import DocumentChange + + return DocumentChange(type, document, old_index, new_index) + + def test_ctor(self): + inst = self._makeOne("type", "document", "old_index", "new_index") + self.assertEqual(inst.type, "type") + self.assertEqual(inst.document, "document") + self.assertEqual(inst.old_index, "old_index") + self.assertEqual(inst.new_index, "new_index") + + +class TestWatchResult(unittest.TestCase): + def _makeOne(self, snapshot, name, change_type): + from google.cloud.firestore_v1.watch import WatchResult + + return WatchResult(snapshot, name, change_type) + + def test_ctor(self): + inst = self._makeOne("snapshot", "name", "change_type") + self.assertEqual(inst.snapshot, "snapshot") + self.assertEqual(inst.name, "name") + self.assertEqual(inst.change_type, "change_type") + + +class Test_maybe_wrap_exception(unittest.TestCase): + def _callFUT(self, exc): + from google.cloud.firestore_v1.watch import _maybe_wrap_exception + + return _maybe_wrap_exception(exc) + + def test_is_grpc_error(self): + import grpc + from google.api_core.exceptions import GoogleAPICallError + + exc = grpc.RpcError() + result = self._callFUT(exc) + self.assertEqual(result.__class__, GoogleAPICallError) + + def test_is_not_grpc_error(self): + exc = ValueError() + result = self._callFUT(exc) + self.assertEqual(result.__class__, ValueError) + + +class Test_document_watch_comparator(unittest.TestCase): + def _callFUT(self, doc1, doc2): + from google.cloud.firestore_v1.watch import document_watch_comparator + + return document_watch_comparator(doc1, doc2) + + def test_same_doc(self): + result = self._callFUT(1, 1) + self.assertEqual(result, 0) + + def test_diff_doc(self): + self.assertRaises(AssertionError, self._callFUT, 1, 2) + + +class TestWatch(unittest.TestCase): + def _makeOne( + self, + document_reference=None, + firestore=None, + target=None, + comparator=None, + snapshot_callback=None, + snapshot_class=None, + reference_class=None, + ): # pragma: NO COVER + from google.cloud.firestore_v1.watch import Watch + + if document_reference is None: + document_reference = DummyDocumentReference() + if firestore is None: + firestore = DummyFirestore() + if target is None: + WATCH_TARGET_ID = 0x5079 # "Py" + target = {"documents": {"documents": ["/"]}, "target_id": WATCH_TARGET_ID} + if comparator is None: + comparator = self._document_watch_comparator + if snapshot_callback is None: + snapshot_callback = self._snapshot_callback + if snapshot_class is None: + snapshot_class = DummyDocumentSnapshot + if reference_class is None: + reference_class = DummyDocumentReference + inst = Watch( + document_reference, + firestore, + target, + comparator, + snapshot_callback, + snapshot_class, + reference_class, + BackgroundConsumer=DummyBackgroundConsumer, + 
ResumableBidiRpc=DummyRpc, + ) + return inst + + def setUp(self): + self.snapshotted = None + + def _document_watch_comparator(self, doc1, doc2): # pragma: NO COVER + return 0 + + def _snapshot_callback(self, docs, changes, read_time): + self.snapshotted = (docs, changes, read_time) + + def test_ctor(self): + inst = self._makeOne() + self.assertTrue(inst._consumer.started) + self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) + + def test__on_rpc_done(self): + inst = self._makeOne() + threading = DummyThreading() + with mock.patch("google.cloud.firestore_v1.watch.threading", threading): + inst._on_rpc_done(True) + from google.cloud.firestore_v1.watch import _RPC_ERROR_THREAD_NAME + + self.assertTrue(threading.threads[_RPC_ERROR_THREAD_NAME].started) + + def test_close(self): + inst = self._makeOne() + inst.close() + self.assertEqual(inst._consumer, None) + self.assertEqual(inst._rpc, None) + self.assertTrue(inst._closed) + + def test_close_already_closed(self): + inst = self._makeOne() + inst._closed = True + old_consumer = inst._consumer + inst.close() + self.assertEqual(inst._consumer, old_consumer) + + def test_close_inactive(self): + inst = self._makeOne() + old_consumer = inst._consumer + old_consumer.is_active = False + inst.close() + self.assertEqual(old_consumer.stopped, False) + + def test_unsubscribe(self): + inst = self._makeOne() + inst.unsubscribe() + self.assertTrue(inst._rpc is None) + + def test_for_document(self): + from google.cloud.firestore_v1.watch import Watch + + docref = DummyDocumentReference() + snapshot_callback = self._snapshot_callback + snapshot_class_instance = DummyDocumentSnapshot + document_reference_class_instance = DummyDocumentReference + modulename = "google.cloud.firestore_v1.watch" + with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): + with mock.patch( + "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer + ): + inst = Watch.for_document( + docref, + snapshot_callback, + snapshot_class_instance, + document_reference_class_instance, + ) + self.assertTrue(inst._consumer.started) + self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) + + def test_for_query(self): + from google.cloud.firestore_v1.watch import Watch + + snapshot_callback = self._snapshot_callback + snapshot_class_instance = DummyDocumentSnapshot + document_reference_class_instance = DummyDocumentReference + modulename = "google.cloud.firestore_v1.watch" + pb2 = DummyPb2() + with mock.patch("%s.firestore_pb2" % modulename, pb2): + with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): + with mock.patch( + "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer + ): + query = DummyQuery() + inst = Watch.for_query( + query, + snapshot_callback, + snapshot_class_instance, + document_reference_class_instance, + ) + self.assertTrue(inst._consumer.started) + self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) + self.assertEqual(inst._targets["query"], "dummy query target") + + def test_on_snapshot_target_no_change_no_target_ids_not_current(self): + inst = self._makeOne() + proto = DummyProto() + inst.on_snapshot(proto) # nothing to assert, no mutations, no rtnval + + def test_on_snapshot_target_no_change_no_target_ids_current(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change.read_time = 1 + inst.current = True + + def push(read_time, next_resume_token): + inst._read_time = read_time + inst._next_resume_token = next_resume_token + + inst.push = push + inst.on_snapshot(proto) + 
self.assertEqual(inst._read_time, 1) + self.assertEqual(inst._next_resume_token, None) + + def test_on_snapshot_target_add(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change.target_change_type = firestore_pb2.TargetChange.ADD + proto.target_change.target_ids = [1] # not "Py" + with self.assertRaises(Exception) as exc: + inst.on_snapshot(proto) + self.assertEqual(str(exc.exception), "Unexpected target ID 1 sent by server") + + def test_on_snapshot_target_remove(self): + inst = self._makeOne() + proto = DummyProto() + target_change = proto.target_change + target_change.target_change_type = firestore_pb2.TargetChange.REMOVE + with self.assertRaises(Exception) as exc: + inst.on_snapshot(proto) + self.assertEqual(str(exc.exception), "Error 1: hi") + + def test_on_snapshot_target_remove_nocause(self): + inst = self._makeOne() + proto = DummyProto() + target_change = proto.target_change + target_change.cause = None + target_change.target_change_type = firestore_pb2.TargetChange.REMOVE + with self.assertRaises(Exception) as exc: + inst.on_snapshot(proto) + self.assertEqual(str(exc.exception), "Error 13: internal error") + + def test_on_snapshot_target_reset(self): + inst = self._makeOne() + + def reset(): + inst._docs_reset = True + + inst._reset_docs = reset + proto = DummyProto() + target_change = proto.target_change + target_change.target_change_type = firestore_pb2.TargetChange.RESET + inst.on_snapshot(proto) + self.assertTrue(inst._docs_reset) + + def test_on_snapshot_target_current(self): + inst = self._makeOne() + inst.current = False + proto = DummyProto() + target_change = proto.target_change + target_change.target_change_type = firestore_pb2.TargetChange.CURRENT + inst.on_snapshot(proto) + self.assertTrue(inst.current) + + def test_on_snapshot_target_unknown(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change.target_change_type = "unknown" + with self.assertRaises(Exception) as exc: + inst.on_snapshot(proto) + self.assertTrue(inst._consumer is None) + self.assertTrue(inst._rpc is None) + self.assertEqual(str(exc.exception), "Unknown target change type: unknown ") + + def test_on_snapshot_document_change_removed(self): + from google.cloud.firestore_v1.watch import WATCH_TARGET_ID, ChangeType + + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change.removed_target_ids = [WATCH_TARGET_ID] + + class DummyDocument: + name = "fred" + + proto.document_change.document = DummyDocument() + inst.on_snapshot(proto) + self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) + + def test_on_snapshot_document_change_changed(self): + from google.cloud.firestore_v1.watch import WATCH_TARGET_ID + + inst = self._makeOne() + + proto = DummyProto() + proto.target_change = "" + proto.document_change.target_ids = [WATCH_TARGET_ID] + + class DummyDocument: + name = "fred" + fields = {} + create_time = None + update_time = None + + proto.document_change.document = DummyDocument() + inst.on_snapshot(proto) + self.assertEqual(inst.change_map["fred"].data, {}) + + def test_on_snapshot_document_change_changed_docname_db_prefix(self): + # TODO: Verify the current behavior. The change map currently contains + # the db-prefixed document name and not the bare document name. 
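+ # NOTE: the server reports fully qualified resource names
+ # ("projects/{project}/databases/{database}/documents/..."), and the watch
+ # layer currently keys ``change_map`` by that full name, which is the
+ # behavior this test pins down.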
+ from google.cloud.firestore_v1.watch import WATCH_TARGET_ID + + inst = self._makeOne() + + proto = DummyProto() + proto.target_change = "" + proto.document_change.target_ids = [WATCH_TARGET_ID] + + class DummyDocument: + name = "abc://foo/documents/fred" + fields = {} + create_time = None + update_time = None + + proto.document_change.document = DummyDocument() + inst._firestore._database_string = "abc://foo" + inst.on_snapshot(proto) + self.assertEqual(inst.change_map["abc://foo/documents/fred"].data, {}) + + def test_on_snapshot_document_change_neither_changed_nor_removed(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change.target_ids = [] + + inst.on_snapshot(proto) + self.assertTrue(not inst.change_map) + + def test_on_snapshot_document_removed(self): + from google.cloud.firestore_v1.watch import ChangeType + + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change = "" + + class DummyRemove(object): + document = "fred" + + remove = DummyRemove() + proto.document_remove = remove + proto.document_delete = "" + inst.on_snapshot(proto) + self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) + + def test_on_snapshot_filter_update(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change = "" + proto.document_remove = "" + proto.document_delete = "" + + class DummyFilter(object): + count = 999 + + proto.filter = DummyFilter() + + def reset(): + inst._docs_reset = True + + inst._reset_docs = reset + inst.on_snapshot(proto) + self.assertTrue(inst._docs_reset) + + def test_on_snapshot_filter_update_no_size_change(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change = "" + proto.document_remove = "" + proto.document_delete = "" + + class DummyFilter(object): + count = 0 + + proto.filter = DummyFilter() + inst._docs_reset = False + + inst.on_snapshot(proto) + self.assertFalse(inst._docs_reset) + + def test_on_snapshot_unknown_listen_type(self): + inst = self._makeOne() + proto = DummyProto() + proto.target_change = "" + proto.document_change = "" + proto.document_remove = "" + proto.document_delete = "" + proto.filter = "" + with self.assertRaises(Exception) as exc: + inst.on_snapshot(proto) + self.assertTrue( + str(exc.exception).startswith("Unknown listen response type"), + str(exc.exception), + ) + + def test_push_callback_called_no_changes(self): + import pytz + + class DummyReadTime(object): + seconds = 1534858278 + + inst = self._makeOne() + inst.push(DummyReadTime, "token") + self.assertEqual( + self.snapshotted, + ([], [], datetime.datetime.fromtimestamp(DummyReadTime.seconds, pytz.utc)), + ) + self.assertTrue(inst.has_pushed) + self.assertEqual(inst.resume_token, "token") + + def test_push_already_pushed(self): + class DummyReadTime(object): + seconds = 1534858278 + + inst = self._makeOne() + inst.has_pushed = True + inst.push(DummyReadTime, "token") + self.assertEqual(self.snapshotted, None) + self.assertTrue(inst.has_pushed) + self.assertEqual(inst.resume_token, "token") + + def test__current_size_empty(self): + inst = self._makeOne() + result = inst._current_size() + self.assertEqual(result, 0) + + def test__current_size_docmap_has_one(self): + inst = self._makeOne() + inst.doc_map["a"] = 1 + result = inst._current_size() + self.assertEqual(result, 1) + + def test__affects_target_target_id_None(self): + inst = self._makeOne() + self.assertTrue(inst._affects_target(None, [])) + + 
def test__affects_target_current_id_in_target_ids(self): + inst = self._makeOne() + self.assertTrue(inst._affects_target([1], 1)) + + def test__affects_target_current_id_not_in_target_ids(self): + inst = self._makeOne() + self.assertFalse(inst._affects_target([1], 2)) + + def test__extract_changes_doc_removed(self): + from google.cloud.firestore_v1.watch import ChangeType + + inst = self._makeOne() + changes = {"name": ChangeType.REMOVED} + doc_map = {"name": True} + results = inst._extract_changes(doc_map, changes, None) + self.assertEqual(results, (["name"], [], [])) + + def test__extract_changes_doc_removed_docname_not_in_docmap(self): + from google.cloud.firestore_v1.watch import ChangeType + + inst = self._makeOne() + changes = {"name": ChangeType.REMOVED} + doc_map = {} + results = inst._extract_changes(doc_map, changes, None) + self.assertEqual(results, ([], [], [])) + + def test__extract_changes_doc_updated(self): + inst = self._makeOne() + + class Dummy(object): + pass + + doc = Dummy() + snapshot = Dummy() + changes = {"name": snapshot} + doc_map = {"name": doc} + results = inst._extract_changes(doc_map, changes, 1) + self.assertEqual(results, ([], [], [snapshot])) + self.assertEqual(snapshot.read_time, 1) + + def test__extract_changes_doc_updated_read_time_is_None(self): + inst = self._makeOne() + + class Dummy(object): + pass + + doc = Dummy() + snapshot = Dummy() + snapshot.read_time = None + changes = {"name": snapshot} + doc_map = {"name": doc} + results = inst._extract_changes(doc_map, changes, None) + self.assertEqual(results, ([], [], [snapshot])) + self.assertEqual(snapshot.read_time, None) + + def test__extract_changes_doc_added(self): + inst = self._makeOne() + + class Dummy(object): + pass + + snapshot = Dummy() + changes = {"name": snapshot} + doc_map = {} + results = inst._extract_changes(doc_map, changes, 1) + self.assertEqual(results, ([], [snapshot], [])) + self.assertEqual(snapshot.read_time, 1) + + def test__extract_changes_doc_added_read_time_is_None(self): + inst = self._makeOne() + + class Dummy(object): + pass + + snapshot = Dummy() + snapshot.read_time = None + changes = {"name": snapshot} + doc_map = {} + results = inst._extract_changes(doc_map, changes, None) + self.assertEqual(results, ([], [snapshot], [])) + self.assertEqual(snapshot.read_time, None) + + def test__compute_snapshot_doctree_and_docmap_disagree_about_length(self): + inst = self._makeOne() + doc_tree = {} + doc_map = {None: None} + self.assertRaises( + AssertionError, inst._compute_snapshot, doc_tree, doc_map, None, None, None + ) + + def test__compute_snapshot_operation_relative_ordering(self): + from google.cloud.firestore_v1.watch import WatchDocTree + + doc_tree = WatchDocTree() + + class DummyDoc(object): + update_time = mock.sentinel + + deleted_doc = DummyDoc() + added_doc = DummyDoc() + added_doc._document_path = "/added" + updated_doc = DummyDoc() + updated_doc._document_path = "/updated" + doc_tree = doc_tree.insert(deleted_doc, None) + doc_tree = doc_tree.insert(updated_doc, None) + doc_map = {"/deleted": deleted_doc, "/updated": updated_doc} + added_snapshot = DummyDocumentSnapshot(added_doc, None, True, None, None, None) + added_snapshot.reference = added_doc + updated_snapshot = DummyDocumentSnapshot( + updated_doc, None, True, None, None, None + ) + updated_snapshot.reference = updated_doc + delete_changes = ["/deleted"] + add_changes = [added_snapshot] + update_changes = [updated_snapshot] + inst = self._makeOne() + updated_tree, updated_map, applied_changes = 
inst._compute_snapshot( + doc_tree, doc_map, delete_changes, add_changes, update_changes + ) + # TODO: Verify that the assertion here is correct. + self.assertEqual( + updated_map, {"/updated": updated_snapshot, "/added": added_snapshot} + ) + + def test__compute_snapshot_modify_docs_updated_doc_no_timechange(self): + from google.cloud.firestore_v1.watch import WatchDocTree + + doc_tree = WatchDocTree() + + class DummyDoc(object): + pass + + updated_doc_v1 = DummyDoc() + updated_doc_v1.update_time = 1 + updated_doc_v1._document_path = "/updated" + updated_doc_v2 = DummyDoc() + updated_doc_v2.update_time = 1 + updated_doc_v2._document_path = "/updated" + doc_tree = doc_tree.insert("/updated", updated_doc_v1) + doc_map = {"/updated": updated_doc_v1} + updated_snapshot = DummyDocumentSnapshot( + updated_doc_v2, None, True, None, None, 1 + ) + delete_changes = [] + add_changes = [] + update_changes = [updated_snapshot] + inst = self._makeOne() + updated_tree, updated_map, applied_changes = inst._compute_snapshot( + doc_tree, doc_map, delete_changes, add_changes, update_changes + ) + self.assertEqual(updated_map, doc_map) # no change + + def test__reset_docs(self): + from google.cloud.firestore_v1.watch import ChangeType + + inst = self._makeOne() + inst.change_map = {None: None} + from google.cloud.firestore_v1.watch import WatchDocTree + + doc = DummyDocumentReference("doc") + doc_tree = WatchDocTree() + snapshot = DummyDocumentSnapshot(doc, None, True, None, None, None) + snapshot.reference = doc + doc_tree = doc_tree.insert(snapshot, None) + inst.doc_tree = doc_tree + inst._reset_docs() + self.assertEqual(inst.change_map, {"/doc": ChangeType.REMOVED}) + self.assertEqual(inst.resume_token, None) + self.assertFalse(inst.current) + + +class DummyFirestoreStub(object): + def Listen(self): # pragma: NO COVER + pass + + +class DummyFirestoreClient(object): + def __init__(self): + self.transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()}) + + +class DummyDocumentReference(object): + def __init__(self, *document_path, **kw): + if "client" not in kw: + self._client = DummyFirestore() + else: + self._client = kw["client"] + + self._path = document_path + self._document_path = "/" + "/".join(document_path) + self.__dict__.update(kw) + + +class DummyQuery(object): # pragma: NO COVER + def __init__(self, **kw): + if "client" not in kw: + self._client = DummyFirestore() + else: + self._client = kw["client"] + + if "comparator" not in kw: + # don't really do the comparison, just return 0 (equal) for all + self._comparator = lambda x, y: 1 + else: + self._comparator = kw["comparator"] + + def _to_protobuf(self): + return "" + + +class DummyFirestore(object): + _firestore_api = DummyFirestoreClient() + _database_string = "abc://bar/" + + def document(self, *document_path): # pragma: NO COVER + if len(document_path) == 1: + path = document_path[0].split("/") + else: + path = document_path + + return DummyDocumentReference(*path, client=self) + + +class DummyDocumentSnapshot(object): + # def __init__(self, **kw): + # self.__dict__.update(kw) + def __init__(self, reference, data, exists, read_time, create_time, update_time): + self.reference = reference + self.data = data + self.exists = exists + self.read_time = read_time + self.create_time = create_time + self.update_time = update_time + + def __str__(self): + return "%s-%s" % (self.reference._document_path, self.read_time) + + def __hash__(self): + return hash(str(self)) + + +class DummyBackgroundConsumer(object): + started = False + 
stopped = False + is_active = True + + def __init__(self, rpc, on_snapshot): + self._rpc = rpc + self.on_snapshot = on_snapshot + + def start(self): + self.started = True + + def stop(self): + self.stopped = True + self.is_active = False + + +class DummyThread(object): + started = False + + def __init__(self, name, target, kwargs): + self.name = name + self.target = target + self.kwargs = kwargs + + def start(self): + self.started = True + + +class DummyThreading(object): + def __init__(self): + self.threads = {} + + def Thread(self, name, target, kwargs): + thread = DummyThread(name, target, kwargs) + self.threads[name] = thread + return thread + + +class DummyRpc(object): + def __init__(self, listen, initial_request, should_recover): + self.listen = listen + self.initial_request = initial_request + self.should_recover = should_recover + self.closed = False + self.callbacks = [] + + def add_done_callback(self, callback): + self.callbacks.append(callback) + + def close(self): + self.closed = True + + +class DummyCause(object): + code = 1 + message = "hi" + + +class DummyChange(object): + def __init__(self): + self.target_ids = [] + self.removed_target_ids = [] + self.read_time = 0 + self.target_change_type = firestore_pb2.TargetChange.NO_CHANGE + self.resume_token = None + self.cause = DummyCause() + + +class DummyProto(object): + def __init__(self): + self.target_change = DummyChange() + self.document_change = DummyChange() + + +class DummyTarget(object): + def QueryTarget(self, **kw): + self.kw = kw + return "dummy query target" + + +class DummyPb2(object): + + Target = DummyTarget() + + def ListenRequest(self, **kw): + pass diff --git a/firestore/tests/unit/v1/testdata/create-all-transforms.textproto b/firestore/tests/unit/v1/testdata/create-all-transforms.textproto new file mode 100644 index 000000000000..bbdf19e4df4a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-all-transforms.textproto @@ -0,0 +1,64 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can be created with any amount of transforms. 
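+#
+# For reference only (not part of the generated fixture): in the Python client
+# this case corresponds, roughly, to a single create() call mixing plain data
+# with the transform sentinels. A minimal sketch, assuming the sentinels
+# exported by google.cloud.firestore (SERVER_TIMESTAMP, ArrayUnion,
+# ArrayRemove):
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project="projectID")
+#     client.collection("C").document("d").create(
+#         {
+#             "a": 1,
+#             "b": firestore.SERVER_TIMESTAMP,
+#             "c": firestore.ArrayUnion([1, 2, 3]),
+#             "d": firestore.ArrayRemove([4, 5, 6]),
+#         }
+#     )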
+ +description: "create: all transforms in a single call" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove-multi.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove-multi.textproto new file mode 100644 index 000000000000..f80d65b2381a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove-multi.textproto @@ -0,0 +1,61 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayRemove field. Since all the ArrayRemove +# fields are removed, the only field in the update is "a". + +description: "create: multiple ArrayRemove fields" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove-nested.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove-nested.textproto new file mode 100644 index 000000000000..97756c306c18 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove-nested.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayRemove value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. 
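+#
+# Illustrative sketch only: in the Python client the nested sentinel is passed
+# inside a nested dict (client as in the earlier sketch):
+#
+#     client.collection("C").document("d").create(
+#         {"a": 1, "b": {"c": firestore.ArrayRemove([1, 2, 3])}}
+#     )
+#
+# The sentinel is stripped from the update write and re-emitted as the field
+# transform on "b.c" shown in the expected request below.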
+ +description: "create: nested ArrayRemove field" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove-noarray-nested.textproto new file mode 100644 index 000000000000..4ec0cb3b9376 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayRemove. Firestore transforms don't support array indexing. + +description: "create: ArrayRemove cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove-noarray.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove-noarray.textproto new file mode 100644 index 000000000000..969b8d9dd84e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "create: ArrayRemove cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove-with-st.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove-with-st.textproto new file mode 100644 index 000000000000..b6ea3224de73 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. 
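+#
+# Sketch of the corresponding (invalid) client call, assuming the public
+# sentinels: nesting ServerTimestamp inside an ArrayRemove is expected to be
+# rejected client-side, which is what is_error below asserts:
+#
+#     client.collection("C").document("d").create(
+#         {"a": firestore.ArrayRemove([1, firestore.SERVER_TIMESTAMP, 3])}
+#     )  # expected to raise a client-side error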
+ +description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayremove.textproto b/firestore/tests/unit/v1/testdata/create-arrayremove.textproto new file mode 100644 index 000000000000..e8e4bb3980db --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayremove.textproto @@ -0,0 +1,47 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayRemove is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. + +description: "create: ArrayRemove with data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion-multi.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion-multi.textproto new file mode 100644 index 000000000000..ec3cb72f5b1b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion-multi.textproto @@ -0,0 +1,61 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayUnion field. Since all the ArrayUnion +# fields are removed, the only field in the update is "a". + +description: "create: multiple ArrayUnion fields" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + append_missing_elements: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion-nested.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion-nested.textproto new file mode 100644 index 000000000000..e6e81bc1d7a2 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion-nested.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayUnion value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "create: nested ArrayUnion field" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion-noarray-nested.textproto new file mode 100644 index 000000000000..4c0afe443048 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayUnion. Firestore transforms don't support array indexing. + +description: "create: ArrayUnion cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion-noarray.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion-noarray.textproto new file mode 100644 index 000000000000..7b791fa4154d --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "create: ArrayUnion cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion-with-st.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion-with-st.textproto new file mode 100644 index 000000000000..a1bf4a90d1c4 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. 
+ +description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-arrayunion.textproto b/firestore/tests/unit/v1/testdata/create-arrayunion.textproto new file mode 100644 index 000000000000..98cb6ad8acb1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-arrayunion.textproto @@ -0,0 +1,47 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayUnion is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. + +description: "create: ArrayUnion with data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-basic.textproto b/firestore/tests/unit/v1/testdata/create-basic.textproto new file mode 100644 index 000000000000..433ffda72704 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-basic.textproto @@ -0,0 +1,27 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. + +description: "create: basic" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-complex.textproto b/firestore/tests/unit/v1/testdata/create-complex.textproto new file mode 100644 index 000000000000..00a994e204a2 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-complex.textproto @@ -0,0 +1,61 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. 
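+#
+# For reference only, the equivalent client call uses plain Python values;
+# ints, floats, strings, booleans, lists and dicts are encoded as the
+# corresponding Value kinds in the expected request below:
+#
+#     client.collection("C").document("d").create(
+#         {"a": [1, 2.5], "b": {"c": ["three", {"d": True}]}}
+#     )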
+ +description: "create: complex" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-del-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/create-del-noarray-nested.textproto new file mode 100644 index 000000000000..60694e137163 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "create: Delete cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-del-noarray.textproto b/firestore/tests/unit/v1/testdata/create-del-noarray.textproto new file mode 100644 index 000000000000..5731be1c7357 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "create: Delete cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-empty.textproto b/firestore/tests/unit/v1/testdata/create-empty.textproto new file mode 100644 index 000000000000..2b6fec7efafd --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-empty.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ + +description: "create: creating or setting an empty map" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-nodel.textproto b/firestore/tests/unit/v1/testdata/create-nodel.textproto new file mode 100644 index 000000000000..c878814b1128 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-nodel.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel cannot be used in Create, or in Set without a Merge option. + +description: "create: Delete cannot appear in data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-nosplit.textproto b/firestore/tests/unit/v1/testdata/create-nosplit.textproto new file mode 100644 index 000000000000..e9e1ee2755f5 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-nosplit.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not split on dots. + +description: "create: don\342\200\231t split on dots" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "c.d" + value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "e" + value: < + integer_value: 2 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-special-chars.textproto b/firestore/tests/unit/v1/testdata/create-special-chars.textproto new file mode 100644 index 000000000000..3a7acd3075de --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-special-chars.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not escape special +# characters. + +description: "create: non-alpha characters in map keys" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "." 
+ value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "~" + value: < + integer_value: 2 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-st-alone.textproto b/firestore/tests/unit/v1/testdata/create-st-alone.textproto new file mode 100644 index 000000000000..9803a676bbe0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-alone.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "create: ServerTimestamp alone" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-st-multi.textproto b/firestore/tests/unit/v1/testdata/create-st-multi.textproto new file mode 100644 index 000000000000..cb3db480999a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-multi.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". + +description: "create: multiple ServerTimestamp fields" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-st-nested.textproto b/firestore/tests/unit/v1/testdata/create-st-nested.textproto new file mode 100644 index 000000000000..6bc03e8e7ca0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-nested.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. 
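+#
+# Equivalent client call (a sketch, client as in the earlier sketches); only
+# "a" stays in the update write, while "b.c" becomes a field transform:
+#
+#     client.collection("C").document("d").create(
+#         {"a": 1, "b": {"c": firestore.SERVER_TIMESTAMP}}
+#     )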
+ +description: "create: nested ServerTimestamp field" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-st-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/create-st-noarray-nested.textproto new file mode 100644 index 000000000000..0cec0aebd4bf --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "create: ServerTimestamp cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-st-noarray.textproto b/firestore/tests/unit/v1/testdata/create-st-noarray.textproto new file mode 100644 index 000000000000..56d91c2cfb5a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "create: ServerTimestamp cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/create-st-with-empty-map.textproto b/firestore/tests/unit/v1/testdata/create-st-with-empty-map.textproto new file mode 100644 index 000000000000..37e7e074abec --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st-with-empty-map.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp +# should be stripped out but the empty map should remain. 
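+#
+# Sketch of the corresponding call: the sibling empty map is kept in the
+# update write while the sentinel moves to the transform write:
+#
+#     client.collection("C").document("d").create(
+#         {"a": {"b": {}, "c": firestore.SERVER_TIMESTAMP}}
+#     )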
+ +description: "create: ServerTimestamp beside an empty map" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + > + > + > + > + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/create-st.textproto b/firestore/tests/unit/v1/testdata/create-st.textproto new file mode 100644 index 000000000000..ddfc6a177e16 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/create-st.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "create: ServerTimestamp with data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/delete-exists-precond.textproto b/firestore/tests/unit/v1/testdata/delete-exists-precond.textproto new file mode 100644 index 000000000000..c9cf2ddea4e6 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/delete-exists-precond.textproto @@ -0,0 +1,21 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Delete supports an exists precondition. + +description: "delete: delete with exists precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/delete-no-precond.textproto b/firestore/tests/unit/v1/testdata/delete-no-precond.textproto new file mode 100644 index 000000000000..a396cdb8c4a1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/delete-no-precond.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ordinary Delete call. 
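+#
+# For reference only: a bare delete. The exists / last-update-time fixtures
+# nearby correspond to passing a write option, e.g.
+# client.write_option(exists=True), assuming the v1 surface keeps the same
+# helpers as v1beta1. A sketch:
+#
+#     client.collection("C").document("d").delete()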
+ +description: "delete: delete without precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + > + > +> diff --git a/firestore/tests/unit/v1/testdata/delete-time-precond.textproto b/firestore/tests/unit/v1/testdata/delete-time-precond.textproto new file mode 100644 index 000000000000..5798f5f3b2fc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/delete-time-precond.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Delete supports a last-update-time precondition. + +description: "delete: delete with last-update-time precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/get-basic.textproto b/firestore/tests/unit/v1/testdata/get-basic.textproto new file mode 100644 index 000000000000..2a448168255b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/get-basic.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to DocumentRef.Get. + +description: "get: get a document" +get: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + request: < + name: "projects/projectID/databases/(default)/documents/C/d" + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-add-mod-del-add.textproto b/firestore/tests/unit/v1/testdata/listen-add-mod-del-add.textproto new file mode 100644 index 000000000000..1aa8dcbc3645 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-add-mod-del-add.textproto @@ -0,0 +1,246 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Various changes to a single document. 
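+#
+# These listen fixtures exercise the Watch machinery covered by the unit tests
+# above. A minimal usage sketch (the callback signature matches the tests'
+# _snapshot_callback):
+#
+#     def callback(docs, changes, read_time):
+#         for change in changes:
+#             print(change.type, change.document.id)
+#
+#     watch = client.collection("C").on_snapshot(callback)
+#     # ... later, stop listening:
+#     watch.unsubscribe()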
+ +description: "listen: add a doc, modify it, delete it, then add it again" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + > + read_time: < + seconds: 2 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + new_index: -1 + > + read_time: < + seconds: 3 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + old_index: -1 + > + read_time: < + seconds: 4 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-add-one.textproto b/firestore/tests/unit/v1/testdata/listen-add-one.textproto new file mode 100644 index 000000000000..2ad1d8e976da --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-add-one.textproto @@ -0,0 +1,79 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Snapshot with a single document. + +description: "listen: add a doc" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-add-three.textproto b/firestore/tests/unit/v1/testdata/listen-add-three.textproto new file mode 100644 index 000000000000..ac846f76260d --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-add-three.textproto @@ -0,0 +1,190 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A snapshot with three documents. The documents are sorted first by the "a" +# field, then by their path. The changes are ordered the same way. 
+ +description: "listen: add three documents" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-doc-remove.textproto b/firestore/tests/unit/v1/testdata/listen-doc-remove.textproto new file mode 100644 index 000000000000..975200f97363 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-doc-remove.textproto @@ -0,0 +1,115 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The DocumentRemove response behaves exactly like DocumentDelete. 
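+#
+# As the unit tests above show (test_on_snapshot_document_removed), the Python
+# Watch maps document_remove responses, like document_delete, to
+# ChangeType.REMOVED, so callers can treat both uniformly, e.g.:
+#
+#     from google.cloud.firestore_v1.watch import ChangeType
+#
+#     removed = [c for c in changes if c.type is ChangeType.REMOVED]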
+ +description: "listen: DocumentRemove behaves like DocumentDelete" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_remove: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-empty.textproto b/firestore/tests/unit/v1/testdata/listen-empty.textproto new file mode 100644 index 000000000000..4d04b79096c7 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-empty.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There are no changes, so the snapshot should be empty. + +description: "listen: no changes; empty snapshot" +listen: < + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + snapshots: < + read_time: < + seconds: 1 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-filter-nop.textproto b/firestore/tests/unit/v1/testdata/listen-filter-nop.textproto new file mode 100644 index 000000000000..48fd72d3ae12 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-filter-nop.textproto @@ -0,0 +1,247 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Filter response whose count matches the size of the current state (docs in +# last snapshot + docs added - docs deleted) is a no-op. 
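+#
+# This mirrors the filter-handling unit tests above: the watch compares
+# filter.count against its local document count (_current_size) and only
+# forces a resync when they disagree. A sketch of that check, not the exact
+# implementation:
+#
+#     if proto.filter.count != watch._current_size():
+#         watch._reset_docs()  # triggers a fresh listen / resync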
+ +description: "listen: Filter response with same size is a no-op" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + filter: < + count: 2 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: 1 + new_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-multi-docs.textproto b/firestore/tests/unit/v1/testdata/listen-multi-docs.textproto new file mode 100644 index 000000000000..8778acc3d1e9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-multi-docs.textproto @@ -0,0 +1,524 @@ +# 
DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Changes should be ordered with deletes first, then additions, then mods, each in +# query order. Old indices refer to the immediately previous state, not the +# previous snapshot + +description: "listen: multiple documents, added, deleted and updated" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d3" + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d2" + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: 
"projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 3 + > + read_time: < + seconds: 2 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 3 + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + 
create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + old_index: 1 + new_index: 1 + > + read_time: < + seconds: 4 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-nocurrent.textproto b/firestore/tests/unit/v1/testdata/listen-nocurrent.textproto new file mode 100644 index 000000000000..24239b6456f9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-nocurrent.textproto @@ -0,0 +1,141 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the watch state is not marked CURRENT, no snapshot is issued. + +description: "listen: no snapshot if we don't see CURRENT" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-nomod.textproto b/firestore/tests/unit/v1/testdata/listen-nomod.textproto new file mode 100644 index 000000000000..2a99edc350c8 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-nomod.textproto @@ -0,0 +1,143 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Document updates are recognized by a change in the update time, not the data. +# This shouldn't actually happen. It is just a test of the update logic. 
+ +description: "listen: add a doc, then change it but without changing its update time" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + read_time: < + seconds: 3 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-removed-target-ids.textproto b/firestore/tests/unit/v1/testdata/listen-removed-target-ids.textproto new file mode 100644 index 000000000000..1e8ead2d8048 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-removed-target-ids.textproto @@ -0,0 +1,131 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A DocumentChange with the watch target ID in the removed_target_ids field is the +# same as deleting a document. + +description: "listen: DocumentChange with removed_target_id is like a delete." 
+listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + removed_target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-reset.textproto b/firestore/tests/unit/v1/testdata/listen-reset.textproto new file mode 100644 index 000000000000..89a75df2783a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-reset.textproto @@ -0,0 +1,382 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A RESET message turns off the CURRENT state, and marks all documents as deleted. + +# If a document appeared on the stream but was never part of a snapshot ("d3" in +# this test), a reset will make it disappear completely. + +# For a snapshot to happen at a NO_CHANGE response, we need to have both seen a +# CURRENT response, and have a change from the previous snapshot. Here, after the +# reset, we see the same version of d2 again. That doesn't result in a snapshot.
+ +description: "listen: RESET turns off CURRENT" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: RESET + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + responses: < + target_change: < + target_change_type: RESET + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 5 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + 
create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: 1 + new_index: -1 + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + > + read_time: < + seconds: 3 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 5 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-target-add-nop.textproto b/firestore/tests/unit/v1/testdata/listen-target-add-nop.textproto new file mode 100644 index 000000000000..3fa7cce56e27 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-target-add-nop.textproto @@ -0,0 +1,88 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A TargetChange_ADD response must have the same watch target ID. + +description: "listen: TargetChange_ADD is a no-op if it has the same target ID" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: ADD + target_ids: 1 + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/listen-target-add-wrong-id.textproto b/firestore/tests/unit/v1/testdata/listen-target-add-wrong-id.textproto new file mode 100644 index 000000000000..87544637b50b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-target-add-wrong-id.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# A TargetChange_ADD response must have the same watch target ID. + +description: "listen: TargetChange_ADD is an error if it has a different target ID" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: ADD + target_ids: 2 + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/listen-target-remove.textproto b/firestore/tests/unit/v1/testdata/listen-target-remove.textproto new file mode 100644 index 000000000000..f34b0890c3f0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/listen-target-remove.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A TargetChange_REMOVE response should never be sent. + +description: "listen: TargetChange_REMOVE should not appear" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: REMOVE + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-arrayremove-cursor.textproto b/firestore/tests/unit/v1/testdata/query-arrayremove-cursor.textproto new file mode 100644 index 000000000000..3c926da963e6 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-arrayremove-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove is not permitted in queries. + +description: "query: ArrayRemove in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "[\"ArrayRemove\", 1, 2, 3]" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-arrayremove-where.textproto b/firestore/tests/unit/v1/testdata/query-arrayremove-where.textproto new file mode 100644 index 000000000000..000b76350e01 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-arrayremove-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove is not permitted in queries. 
+ +description: "query: ArrayRemove in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "[\"ArrayRemove\", 1, 2, 3]" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-arrayunion-cursor.textproto b/firestore/tests/unit/v1/testdata/query-arrayunion-cursor.textproto new file mode 100644 index 000000000000..e8a61104d1b3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-arrayunion-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion is not permitted in queries. + +description: "query: ArrayUnion in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "[\"ArrayUnion\", 1, 2, 3]" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-arrayunion-where.textproto b/firestore/tests/unit/v1/testdata/query-arrayunion-where.textproto new file mode 100644 index 000000000000..94923134e2b1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-arrayunion-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion is not permitted in queries. + +description: "query: ArrayUnion in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "[\"ArrayUnion\", 1, 2, 3]" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-bad-NaN.textproto b/firestore/tests/unit/v1/testdata/query-bad-NaN.textproto new file mode 100644 index 000000000000..6806dd04ab27 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-bad-NaN.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# You can only compare NaN for equality. + +description: "query: where clause with non-== comparison with NaN" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "<" + json_value: "\"NaN\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-bad-null.textproto b/firestore/tests/unit/v1/testdata/query-bad-null.textproto new file mode 100644 index 000000000000..7fdfb3f2b5dd --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-bad-null.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# You can only compare Null for equality. 
+ +description: "query: where clause with non-== comparison with Null" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">" + json_value: "null" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap-order.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-order.textproto new file mode 100644 index 000000000000..bab8601e8d6c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-order.textproto @@ -0,0 +1,68 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a document snapshot is used, the client appends a __name__ order-by clause +# with the direction of the last order-by clause. + +description: "query: cursor methods with a document snapshot, existing orderBy" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "b" + > + direction: "desc" + > + > + clauses: < + start_after: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "b" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + integer_value: 8 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap-orderby-name.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-orderby-name.textproto new file mode 100644 index 000000000000..d0ce3df45a2f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-orderby-name.textproto @@ -0,0 +1,76 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If there is an existing orderBy clause on __name__, no changes are made to the +# list of orderBy clauses. 
+ +description: "query: cursor method, doc snapshot, existing orderBy __name__" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + clauses: < + order_by: < + path: < + field: "__name__" + > + direction: "asc" + > + > + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + clauses: < + end_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + end_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-eq.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-eq.textproto new file mode 100644 index 000000000000..8b1e217df5f2 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-eq.textproto @@ -0,0 +1,53 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause using equality doesn't change the implicit orderBy clauses. + +description: "query: cursor methods with a document snapshot and an equality where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "3" + > + > + clauses: < + end_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: EQUAL + value: < + integer_value: 3 + > + > + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + end_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq-orderby.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq-orderby.textproto new file mode 100644 index 000000000000..a69edfc50d11 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq-orderby.textproto @@ -0,0 +1,72 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If there is an OrderBy clause, the inequality Where clause does not result in a +# new OrderBy clause. 
We still add a __name__ OrderBy clause + +description: "query: cursor method, doc snapshot, inequality where clause, and existing orderBy clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + clauses: < + where: < + path: < + field: "a" + > + op: "<" + json_value: "4" + > + > + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: LESS_THAN + value: < + integer_value: 4 + > + > + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq.textproto new file mode 100644 index 000000000000..871dd0ba3392 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap-where-neq.textproto @@ -0,0 +1,64 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause with an inequality results in an OrderBy clause on that clause's +# path, if there are no other OrderBy clauses. + +description: "query: cursor method with a document snapshot and an inequality where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "<=" + json_value: "3" + > + > + clauses: < + end_before: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: LESS_THAN_OR_EQUAL + value: < + integer_value: 3 + > + > + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + end_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-docsnap.textproto b/firestore/tests/unit/v1/testdata/query-cursor-docsnap.textproto new file mode 100644 index 000000000000..184bffc2d326 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-docsnap.textproto @@ -0,0 +1,34 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a document snapshot is used, the client appends a __name__ order-by clause. 
+ +description: "query: cursor methods with a document snapshot" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty-map.textproto b/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty-map.textproto new file mode 100644 index 000000000000..c197d23afe16 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty-map.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods are allowed to use empty maps with EndBefore. It should result in +# an empty map in the query. + +description: "query: EndBefore with explicit empty map" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "{}" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + end_at: < + values: < + map_value: < + > + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty.textproto b/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty.textproto new file mode 100644 index 000000000000..a41775abf074 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-endbefore-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods are not allowed to use empty values with EndBefore. It should +# result in an error. + +description: "query: EndBefore with empty values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-no-order.textproto b/firestore/tests/unit/v1/testdata/query-cursor-no-order.textproto new file mode 100644 index 000000000000..fb999ddabb0f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-no-order.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a cursor method with a list of values is provided, there must be at least as +# many explicit orderBy clauses as values. 
+ +description: "query: cursor method without orderBy" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + start_at: < + json_values: "2" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-startat-empty-map.textproto b/firestore/tests/unit/v1/testdata/query-cursor-startat-empty-map.textproto new file mode 100644 index 000000000000..557aca2c9194 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-startat-empty-map.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods are allowed to use empty maps with StartAt. It should result in +# an empty map in the query. + +description: "query: StartAt with explicit empty map" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_at: < + json_values: "{}" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + map_value: < + > + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-startat-empty.textproto b/firestore/tests/unit/v1/testdata/query-cursor-startat-empty.textproto new file mode 100644 index 000000000000..e0c54d98a6cc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-startat-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods are not allowed to use empty values with StartAt. It should +# result in an error. + +description: "query: StartAt with empty values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_at: < + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-vals-1a.textproto b/firestore/tests/unit/v1/testdata/query-cursor-vals-1a.textproto new file mode 100644 index 000000000000..bb08ab7d4d5b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-vals-1a.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. 
+ +description: "query: StartAt/EndBefore with values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_at: < + json_values: "7" + > + > + clauses: < + end_before: < + json_values: "9" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + before: true + > + end_at: < + values: < + integer_value: 9 + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-vals-1b.textproto b/firestore/tests/unit/v1/testdata/query-cursor-vals-1b.textproto new file mode 100644 index 000000000000..41e69e9e6f14 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-vals-1b.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. + +description: "query: StartAfter/EndAt with values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "7" + > + > + clauses: < + end_at: < + json_values: "9" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + > + end_at: < + values: < + integer_value: 9 + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-vals-2.textproto b/firestore/tests/unit/v1/testdata/query-cursor-vals-2.textproto new file mode 100644 index 000000000000..8e37ad0035fa --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-vals-2.textproto @@ -0,0 +1,71 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. + +description: "query: Start/End with two values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "b" + > + direction: "desc" + > + > + clauses: < + start_at: < + json_values: "7" + json_values: "8" + > + > + clauses: < + end_at: < + json_values: "9" + json_values: "10" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "b" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + integer_value: 8 + > + before: true + > + end_at: < + values: < + integer_value: 9 + > + values: < + integer_value: 10 + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-vals-docid.textproto b/firestore/tests/unit/v1/testdata/query-cursor-vals-docid.textproto new file mode 100644 index 000000000000..91af3486c998 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-vals-docid.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# Cursor values corresponding to a __name__ field take the document path relative +# to the query's collection. + +description: "query: cursor methods with __name__" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "__name__" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "\"D1\"" + > + > + clauses: < + end_before: < + json_values: "\"D2\"" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D1" + > + > + end_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D2" + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-cursor-vals-last-wins.textproto b/firestore/tests/unit/v1/testdata/query-cursor-vals-last-wins.textproto new file mode 100644 index 000000000000..9e8fbb19f336 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-cursor-vals-last-wins.textproto @@ -0,0 +1,60 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When multiple Start* or End* calls occur, the values of the last one are used. + +description: "query: cursor methods, last one wins" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "1" + > + > + clauses: < + start_at: < + json_values: "2" + > + > + clauses: < + end_at: < + json_values: "3" + > + > + clauses: < + end_before: < + json_values: "4" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 2 + > + before: true + > + end_at: < + values: < + integer_value: 4 + > + before: true + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-del-cursor.textproto b/firestore/tests/unit/v1/testdata/query-del-cursor.textproto new file mode 100644 index 000000000000..c9d4adb7c5dc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-del-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: Delete in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "\"Delete\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-del-where.textproto b/firestore/tests/unit/v1/testdata/query-del-where.textproto new file mode 100644 index 000000000000..8e92529492ea --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-del-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. 
+ +description: "query: Delete in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"Delete\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-invalid-operator.textproto b/firestore/tests/unit/v1/testdata/query-invalid-operator.textproto new file mode 100644 index 000000000000..e580c64a759f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-invalid-operator.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The != operator is not supported. + +description: "query: invalid operator in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "!=" + json_value: "4" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-invalid-path-order.textproto b/firestore/tests/unit/v1/testdata/query-invalid-path-order.textproto new file mode 100644 index 000000000000..e0a72057620c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-invalid-path-order.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. + +description: "query: invalid path in OrderBy clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "*" + field: "" + > + direction: "asc" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-invalid-path-select.textproto b/firestore/tests/unit/v1/testdata/query-invalid-path-select.textproto new file mode 100644 index 000000000000..944f984f7fa9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-invalid-path-select.textproto @@ -0,0 +1,18 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. + +description: "query: invalid path in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "*" + field: "" + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-invalid-path-where.textproto b/firestore/tests/unit/v1/testdata/query-invalid-path-where.textproto new file mode 100644 index 000000000000..527923b09799 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-invalid-path-where.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. 
+ +description: "query: invalid path in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "*" + field: "" + > + op: "==" + json_value: "4" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-offset-limit-last-wins.textproto b/firestore/tests/unit/v1/testdata/query-offset-limit-last-wins.textproto new file mode 100644 index 000000000000..dc301f439e8d --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-offset-limit-last-wins.textproto @@ -0,0 +1,30 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# With multiple Offset or Limit clauses, the last one wins. + +description: "query: multiple Offset and Limit clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + offset: 2 + > + clauses: < + limit: 3 + > + clauses: < + limit: 4 + > + clauses: < + offset: 5 + > + query: < + from: < + collection_id: "C" + > + offset: 5 + limit: < + value: 4 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-offset-limit.textproto b/firestore/tests/unit/v1/testdata/query-offset-limit.textproto new file mode 100644 index 000000000000..136d9d46a615 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-offset-limit.textproto @@ -0,0 +1,24 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Offset and Limit clauses. + +description: "query: Offset and Limit clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + offset: 2 + > + clauses: < + limit: 3 + > + query: < + from: < + collection_id: "C" + > + offset: 2 + limit: < + value: 3 + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-order.textproto b/firestore/tests/unit/v1/testdata/query-order.textproto new file mode 100644 index 000000000000..7ed4c4ead840 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-order.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Multiple OrderBy clauses combine. + +description: "query: basic OrderBy clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "b" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "b" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-select-empty.textproto b/firestore/tests/unit/v1/testdata/query-select-empty.textproto new file mode 100644 index 000000000000..def8b55ac515 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-select-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An empty Select clause selects just the document ID. 
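
The Select, OrderBy, Offset, and Limit fixtures in this group all describe one mapping: chained builder calls accumulate into a single `StructuredQuery`, and a later Offset, Limit, or Select call replaces an earlier one. A sketch of that mapping through the public Python API, combining several of the fixtures above into one query; `_to_protobuf()` is a private helper and the fake-credentials setup mirrors this repo's unit tests:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )

    query = (
        client.collection("C")
        .select(["a", "b"])
        .order_by("b")  # ASCENDING by default
        .order_by("a", direction=firestore.Query.DESCENDING)
        .offset(2)
        .limit(3)
    )

    # The resulting StructuredQuery carries a projection on "a" and "b", two
    # order_by clauses, offset 2, and limit 3, as in the expectations above.
    print(query._to_protobuf())
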
+ +description: "query: empty Select clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + > + > + query: < + select: < + fields: < + field_path: "__name__" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-select-last-wins.textproto b/firestore/tests/unit/v1/testdata/query-select-last-wins.textproto new file mode 100644 index 000000000000..bd78d09eb9b8 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-select-last-wins.textproto @@ -0,0 +1,36 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The last Select clause is the only one used. + +description: "query: two Select clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + > + clauses: < + select: < + fields: < + field: "c" + > + > + > + query: < + select: < + fields: < + field_path: "c" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-select.textproto b/firestore/tests/unit/v1/testdata/query-select.textproto new file mode 100644 index 000000000000..15e11249730c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-select.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ordinary Select clause. + +description: "query: Select clause with some fields" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + > + query: < + select: < + fields: < + field_path: "a" + > + fields: < + field_path: "b" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-st-cursor.textproto b/firestore/tests/unit/v1/testdata/query-st-cursor.textproto new file mode 100644 index 000000000000..66885d0dd5dc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-st-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: ServerTimestamp in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "\"ServerTimestamp\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-st-where.textproto b/firestore/tests/unit/v1/testdata/query-st-where.textproto new file mode 100644 index 000000000000..05da28d54291 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-st-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. 
+ +description: "query: ServerTimestamp in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"ServerTimestamp\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/query-where-2.textproto b/firestore/tests/unit/v1/testdata/query-where-2.textproto new file mode 100644 index 000000000000..1034463079e1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-where-2.textproto @@ -0,0 +1,59 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Multiple Where clauses are combined into a composite filter. + +description: "query: two Where clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">=" + json_value: "5" + > + > + clauses: < + where: < + path: < + field: "b" + > + op: "<" + json_value: "\"foo\"" + > + > + query: < + from: < + collection_id: "C" + > + where: < + composite_filter: < + op: AND + filters: < + field_filter: < + field: < + field_path: "a" + > + op: GREATER_THAN_OR_EQUAL + value: < + integer_value: 5 + > + > + > + filters: < + field_filter: < + field: < + field_path: "b" + > + op: LESS_THAN + value: < + string_value: "foo" + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-where-NaN.textproto b/firestore/tests/unit/v1/testdata/query-where-NaN.textproto new file mode 100644 index 000000000000..4a97ca7dde1f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-where-NaN.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause that tests for equality with NaN results in a unary filter. + +description: "query: a Where clause comparing to NaN" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"NaN\"" + > + > + query: < + from: < + collection_id: "C" + > + where: < + unary_filter: < + op: IS_NAN + field: < + field_path: "a" + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-where-null.textproto b/firestore/tests/unit/v1/testdata/query-where-null.textproto new file mode 100644 index 000000000000..1869c60c72aa --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-where-null.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause that tests for equality with null results in a unary filter. + +description: "query: a Where clause comparing to null" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "null" + > + > + query: < + from: < + collection_id: "C" + > + where: < + unary_filter: < + op: IS_NULL + field: < + field_path: "a" + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-where.textproto b/firestore/tests/unit/v1/testdata/query-where.textproto new file mode 100644 index 000000000000..045c2befab88 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-where.textproto @@ -0,0 +1,34 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple Where clause. + +description: "query: Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">" + json_value: "5" + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: GREATER_THAN + value: < + integer_value: 5 + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/query-wrong-collection.textproto b/firestore/tests/unit/v1/testdata/query-wrong-collection.textproto new file mode 100644 index 000000000000..ad6f353d5fc9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/query-wrong-collection.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a document snapshot is passed to a Start*/End* method, it must be in the same +# collection as the query. + +description: "query: doc snapshot with wrong collection in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + end_before: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C2/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-all-transforms.textproto b/firestore/tests/unit/v1/testdata/set-all-transforms.textproto new file mode 100644 index 000000000000..bf18f9a5b12a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-all-transforms.textproto @@ -0,0 +1,61 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can be created with any amount of transforms. + +description: "set: all transforms in a single call" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove-multi.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove-multi.textproto new file mode 100644 index 000000000000..9b62fe191953 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove-multi.textproto @@ -0,0 +1,58 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# A document can have more than one ArrayRemove field. Since all the ArrayRemove +# fields are removed, the only field in the update is "a". + +description: "set: multiple ArrayRemove fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove-nested.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove-nested.textproto new file mode 100644 index 000000000000..617609c5a39e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove-nested.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayRemove value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "set: nested ArrayRemove field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove-noarray-nested.textproto new file mode 100644 index 000000000000..2efa34a59f19 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayRemove. Firestore transforms don't support array indexing. 
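
As the ArrayRemove and ArrayUnion fixtures show, array-transform sentinels are stripped from the update write and re-emitted as a separate transform write, and they may only appear as the value of a field, never inside an array value. A sketch of both behaviours using a WriteBatch so the writes can be inspected without committing; `_write_pbs` is an internal attribute and the exception type is an assumption:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )
    doc_ref = client.document("C", "d")

    batch = client.batch()  # nothing is sent until batch.commit()
    # "b" is dropped from the update write and becomes a remove_all_from_array
    # field transform, leaving "a" as the only updated field.
    batch.set(doc_ref, {"a": 1, "b": firestore.ArrayRemove([1, 2, 3])})
    for write in batch._write_pbs:
        print(write)

    try:
        # A transform sentinel nested inside an array value should be rejected.
        client.batch().set(doc_ref, {"a": [1, {"b": firestore.ArrayUnion([1])}]})
    except Exception as exc:  # exact exception type is an assumption
        print("rejected:", exc)
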
+ +description: "set: ArrayRemove cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove-noarray.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove-noarray.textproto new file mode 100644 index 000000000000..e7aa209ea22b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "set: ArrayRemove cannot be in an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove-with-st.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove-with-st.textproto new file mode 100644 index 000000000000..353025b59ff5 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. + +description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayremove.textproto b/firestore/tests/unit/v1/testdata/set-arrayremove.textproto new file mode 100644 index 000000000000..8aa6b60d0156 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayremove.textproto @@ -0,0 +1,44 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayRemove is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. + +description: "set: ArrayRemove with data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion-multi.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion-multi.textproto new file mode 100644 index 000000000000..e515bfa8d188 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion-multi.textproto @@ -0,0 +1,58 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayUnion field. Since all the ArrayUnion +# fields are removed, the only field in the update is "a". + +description: "set: multiple ArrayUnion fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + append_missing_elements: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion-nested.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion-nested.textproto new file mode 100644 index 000000000000..f8abeb0d0004 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion-nested.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayUnion value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "set: nested ArrayUnion field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion-noarray-nested.textproto new file mode 100644 index 000000000000..2b4170f431a3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayUnion. Firestore transforms don't support array indexing. 
+ +description: "set: ArrayUnion cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion-noarray.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion-noarray.textproto new file mode 100644 index 000000000000..e08af3a07f14 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "set: ArrayUnion cannot be in an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion-with-st.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion-with-st.textproto new file mode 100644 index 000000000000..37a7a132e750 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. + +description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-arrayunion.textproto b/firestore/tests/unit/v1/testdata/set-arrayunion.textproto new file mode 100644 index 000000000000..4751e0c0e322 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-arrayunion.textproto @@ -0,0 +1,44 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayUnion is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. + +description: "set: ArrayUnion with data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-basic.textproto b/firestore/tests/unit/v1/testdata/set-basic.textproto new file mode 100644 index 000000000000..e9b292e3cdc3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-basic.textproto @@ -0,0 +1,24 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. + +description: "set: basic" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-complex.textproto b/firestore/tests/unit/v1/testdata/set-complex.textproto new file mode 100644 index 000000000000..6ec19500a2d0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-complex.textproto @@ -0,0 +1,58 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. + +description: "set: complex" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-del-merge-alone.textproto b/firestore/tests/unit/v1/testdata/set-del-merge-alone.textproto new file mode 100644 index 000000000000..811ab8dfe7bb --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-merge-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a merge option. If the delete paths are the +# only ones to be merged, then no document is sent, just an update mask. + +description: "set-merge: Delete with merge" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + field: "c" + > + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-del-merge.textproto b/firestore/tests/unit/v1/testdata/set-del-merge.textproto new file mode 100644 index 000000000000..b8d8631051e7 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-merge.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a merge option. 
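
The Delete-with-merge fixtures describe how `set()` with a merge option turns a Delete sentinel into an update-mask entry with no corresponding field data: the masked path is cleared on the server while the other merged fields are written. A sketch of the `set-del-merge` case, with the same fake-credentials and `_write_pbs` caveats as above:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )

    batch = client.batch()
    # "b.c" appears only in the update mask (so it is deleted), while "a" is
    # written normally.
    batch.set(
        client.document("C", "d"),
        {"a": 1, "b": {"c": firestore.DELETE_FIELD}},
        merge=["a", "b.c"],
    )
    print(batch._write_pbs[0])
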
+ +description: "set-merge: Delete with merge" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "b" + field: "c" + > + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-del-mergeall.textproto b/firestore/tests/unit/v1/testdata/set-del-mergeall.textproto new file mode 100644 index 000000000000..af1e84524bca --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-mergeall.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a mergeAll option. + +description: "set: Delete with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-del-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/set-del-noarray-nested.textproto new file mode 100644 index 000000000000..bbf6a3d00af3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "set: Delete cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-del-noarray.textproto b/firestore/tests/unit/v1/testdata/set-del-noarray.textproto new file mode 100644 index 000000000000..07fc6497dc35 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. 
+ +description: "set: Delete cannot be in an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-del-nomerge.textproto b/firestore/tests/unit/v1/testdata/set-del-nomerge.textproto new file mode 100644 index 000000000000..cb6ef4f85870 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-nomerge.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The client signals an error if the Delete sentinel is in the input data, but not +# selected by a merge option, because this is most likely a programming bug. + +description: "set-merge: Delete cannot appear in an unmerged field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + > + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-del-nonleaf.textproto b/firestore/tests/unit/v1/testdata/set-del-nonleaf.textproto new file mode 100644 index 000000000000..54f22d95c521 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-nonleaf.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a Delete is part of the value at a merge path, then the user is confused: +# their merge path says "replace this entire value" but their Delete says "delete +# this part of the value". This should be an error, just as if they specified +# Delete in a Set with no merge. + +description: "set-merge: Delete cannot appear as part of a merge path" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"g\": \"Delete\"}}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-del-wo-merge.textproto b/firestore/tests/unit/v1/testdata/set-del-wo-merge.textproto new file mode 100644 index 000000000000..29196628bfd8 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-del-wo-merge.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Without a merge option, Set replaces the document with the input data. A Delete +# sentinel in the data makes no sense in this case. + +description: "set: Delete cannot appear unless a merge option is specified" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-empty.textproto b/firestore/tests/unit/v1/testdata/set-empty.textproto new file mode 100644 index 000000000000..c2b73d3ff933 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-empty.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ + +description: "set: creating or setting an empty map" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-merge-fp.textproto b/firestore/tests/unit/v1/testdata/set-merge-fp.textproto new file mode 100644 index 000000000000..68690f6f1633 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge-fp.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A merge with fields that use special characters. + +description: "set-merge: Merge with FieldPaths" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "*" + field: "~" + > + > + json_data: "{\"*\": {\"~\": true}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "~" + value: < + boolean_value: true + > + > + > + > + > + > + update_mask: < + field_paths: "`*`.`~`" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-merge-nested.textproto b/firestore/tests/unit/v1/testdata/set-merge-nested.textproto new file mode 100644 index 000000000000..0d1282818d76 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge-nested.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A merge option where the field is not at top level. Only fields mentioned in the +# option are present in the update operation. + +description: "set-merge: Merge with a nested field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + field: "g" + > + > + json_data: "{\"h\": {\"g\": 4, \"f\": 5}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "g" + value: < + integer_value: 4 + > + > + > + > + > + > + update_mask: < + field_paths: "h.g" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-merge-nonleaf.textproto b/firestore/tests/unit/v1/testdata/set-merge-nonleaf.textproto new file mode 100644 index 000000000000..ca41cb03402d --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge-nonleaf.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value. That is true even if the value is complex. 
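
In other words, merging on a non-leaf field path sends the entire map stored at that path and masks only the path itself, while fields outside the merge list (here `"e"`) are pruned from the request. A sketch under the same assumptions as the earlier batch examples:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )

    batch = client.batch()
    # Only "h" is merged: the whole {"f": 5, "g": 6} map is written with an
    # update mask of "h"; "e" does not appear in the request at all.
    batch.set(
        client.document("C", "d"),
        {"h": {"f": 5, "g": 6}, "e": 7},
        merge=["h"],
    )
    print(batch._write_pbs[0])
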
+ +description: "set-merge: Merge field is not a leaf" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"f\": 5, \"g\": 6}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 5 + > + > + fields: < + key: "g" + value: < + integer_value: 6 + > + > + > + > + > + > + update_mask: < + field_paths: "h" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-merge-prefix.textproto b/firestore/tests/unit/v1/testdata/set-merge-prefix.textproto new file mode 100644 index 000000000000..1e2c2c50226e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge-prefix.textproto @@ -0,0 +1,21 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The prefix would make the other path meaningless, so this is probably a +# programming error. + +description: "set-merge: One merge path cannot be the prefix of another" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "a" + field: "b" + > + > + json_data: "{\"a\": {\"b\": 1}}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-merge-present.textproto b/firestore/tests/unit/v1/testdata/set-merge-present.textproto new file mode 100644 index 000000000000..f6665de5cdc3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge-present.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The client signals an error if a merge option mentions a path that is not in the +# input data. + +description: "set-merge: Merge fields must all be present in data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + > + fields: < + field: "a" + > + > + json_data: "{\"a\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-merge.textproto b/firestore/tests/unit/v1/testdata/set-merge.textproto new file mode 100644 index 000000000000..279125253cb1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-merge.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Fields in the input data but not in a merge option are pruned. 
+ +description: "set-merge: Merge with a field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + > + json_data: "{\"a\": 1, \"b\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-mergeall-empty.textproto b/firestore/tests/unit/v1/testdata/set-mergeall-empty.textproto new file mode 100644 index 000000000000..16df8a22bed3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-mergeall-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# This is a valid call that can be used to ensure a document exists. + +description: "set: MergeAll can be specified with empty data." +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-mergeall-nested.textproto b/firestore/tests/unit/v1/testdata/set-mergeall-nested.textproto new file mode 100644 index 000000000000..1fbc6973cd28 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-mergeall-nested.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# MergeAll with nested fields results in an update mask that includes entries for +# all the leaf fields. + +description: "set: MergeAll with nested fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"h\": { \"g\": 3, \"f\": 4 }}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 4 + > + > + fields: < + key: "g" + value: < + integer_value: 3 + > + > + > + > + > + > + update_mask: < + field_paths: "h.f" + field_paths: "h.g" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-mergeall.textproto b/firestore/tests/unit/v1/testdata/set-mergeall.textproto new file mode 100644 index 000000000000..cb2ebc52bc06 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-mergeall.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The MergeAll option with a simple piece of data. 
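
With `merge=True` (MergeAll) the update mask lists exactly the leaf fields present in the input data, so the call never clobbers fields that are absent from the data. A sketch, with the same caveats as the earlier batch examples:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )

    batch = client.batch()
    # MergeAll: both fields are written and the update mask lists "a" and "b".
    batch.set(client.document("C", "d"), {"a": 1, "b": 2}, merge=True)
    print(batch._write_pbs[0])
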
+ +description: "set: MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + fields: < + key: "b" + value: < + integer_value: 2 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-nodel.textproto b/firestore/tests/unit/v1/testdata/set-nodel.textproto new file mode 100644 index 000000000000..0fb887d461be --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-nodel.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel cannot be used in Create, or in Set without a Merge option. + +description: "set: Delete cannot appear in data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-nosplit.textproto b/firestore/tests/unit/v1/testdata/set-nosplit.textproto new file mode 100644 index 000000000000..0ff3fadcf4ba --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-nosplit.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not split on dots. + +description: "set: don\342\200\231t split on dots" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "c.d" + value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "e" + value: < + integer_value: 2 + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-special-chars.textproto b/firestore/tests/unit/v1/testdata/set-special-chars.textproto new file mode 100644 index 000000000000..f4122c9f004c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-special-chars.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not escape special +# characters. + +description: "set: non-alpha characters in map keys" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "." 
+ value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "~" + value: < + integer_value: 2 + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-alone-mergeall.textproto b/firestore/tests/unit/v1/testdata/set-st-alone-mergeall.textproto new file mode 100644 index 000000000000..16ce4cfbd913 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-alone-mergeall.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "set: ServerTimestamp alone with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-alone.textproto b/firestore/tests/unit/v1/testdata/set-st-alone.textproto new file mode 100644 index 000000000000..6ce46d7f1ab5 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then an update operation +# with an empty map should be produced. + +description: "set: ServerTimestamp alone" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-merge-both.textproto b/firestore/tests/unit/v1/testdata/set-st-merge-both.textproto new file mode 100644 index 000000000000..5cc7bbc9efbf --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-merge-both.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Just as when no merge option is specified, ServerTimestamp sentinel values are +# removed from the data in the update operation and become transforms. 
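
The ServerTimestamp fixtures all reduce to the same rule: the sentinel is stripped from the document data and re-emitted as a `set_to_server_value: REQUEST_TIME` field transform, as a second write when ordinary data is present, or as the only write when it is not. A sketch of the merge variant, with the same fake-credentials and `_write_pbs` caveats:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud import firestore

    client = firestore.Client(
        project="projectID",
        credentials=mock.Mock(spec=ga_credentials.Credentials),
    )

    batch = client.batch()
    # "b" never appears in the update write; it becomes a REQUEST_TIME transform
    # carried by a second write in the same commit.
    batch.set(
        client.document("C", "d"),
        {"a": 1, "b": firestore.SERVER_TIMESTAMP},
        merge=["a", "b"],
    )
    for write in batch._write_pbs:
        print(write)
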
+ +description: "set-merge: ServerTimestamp with Merge of both fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.textproto b/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.textproto new file mode 100644 index 000000000000..f513b6c804c5 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf-alone.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value. If the value has only ServerTimestamps, they become transforms and we +# clear the value by including the field path in the update mask. + +description: "set-merge: non-leaf merge field with ServerTimestamp alone" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"g\": \"ServerTimestamp\"}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "h" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "h.g" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf.textproto b/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf.textproto new file mode 100644 index 000000000000..e53e7e2682eb --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-merge-nonleaf.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value, and ServerTimestamps inside that value become transforms as usual. 
+ +description: "set-merge: non-leaf merge field with ServerTimestamp" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"f\": 5, \"g\": \"ServerTimestamp\"}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 5 + > + > + > + > + > + > + update_mask: < + field_paths: "h" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "h.g" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-merge-nowrite.textproto b/firestore/tests/unit/v1/testdata/set-st-merge-nowrite.textproto new file mode 100644 index 000000000000..3222230dc510 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-merge-nowrite.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If all the fields in the merge option have ServerTimestamp values, then no +# update operation is produced, only a transform. + +description: "set-merge: If no ordinary values in Merge, no write" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + > + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-mergeall.textproto b/firestore/tests/unit/v1/testdata/set-st-mergeall.textproto new file mode 100644 index 000000000000..b8c53a566fdd --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-mergeall.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Just as when no merge option is specified, ServerTimestamp sentinel values are +# removed from the data in the update operation and become transforms. + +description: "set: ServerTimestamp with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-multi.textproto b/firestore/tests/unit/v1/testdata/set-st-multi.textproto new file mode 100644 index 000000000000..375ec18d68fd --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-multi.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". + +description: "set: multiple ServerTimestamp fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-nested.textproto b/firestore/tests/unit/v1/testdata/set-st-nested.textproto new file mode 100644 index 000000000000..abfd2e8fd874 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-nested.textproto @@ -0,0 +1,35 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "set: nested ServerTimestamp field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/set-st-noarray-nested.textproto new file mode 100644 index 000000000000..241d79151a42 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "set: ServerTimestamp cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-st-noarray.textproto b/firestore/tests/unit/v1/testdata/set-st-noarray.textproto new file mode 100644 index 000000000000..591fb0343854 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "set: ServerTimestamp cannot be in an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/set-st-nomerge.textproto b/firestore/tests/unit/v1/testdata/set-st-nomerge.textproto new file mode 100644 index 000000000000..20c0ae1fbb0e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-nomerge.textproto @@ -0,0 +1,33 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the ServerTimestamp value is not mentioned in a merge option, then it is +# pruned from the data but does not result in a transform. + +description: "set-merge: If is ServerTimestamp not in Merge, no transform" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st-with-empty-map.textproto b/firestore/tests/unit/v1/testdata/set-st-with-empty-map.textproto new file mode 100644 index 000000000000..5e187983f995 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st-with-empty-map.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp +# should be stripped out but the empty map should remain. + +description: "set: ServerTimestamp beside an empty map" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + > + > + > + > + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/set-st.textproto b/firestore/tests/unit/v1/testdata/set-st.textproto new file mode 100644 index 000000000000..8bceddceeacc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/set-st.textproto @@ -0,0 +1,36 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. 
Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "set: ServerTimestamp with data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/test-suite.binproto b/firestore/tests/unit/v1/testdata/test-suite.binproto new file mode 100644 index 0000000000000000000000000000000000000000..6e3ce397375224cab4ee93e9ae05495a182bc983 GIT binary patch literal 55916 zcmdsA3v?V;dCtsAwtVd*$~X^u5|YWpBxWs3(b%yQ5tDdhyF{^LVkaaJf<4+DS(~hO zm7SFxV>S@Vqoh0=punL(p@s4ew1onN@@NaC6bfx9l=g6-htdO-LMi1OP7D3+Yjz&H ztJynaIcm;{w3e-V|NGtl|L=eQ*Cj8~$ogYmYs$hG%e5+v^5VQ#Zy9y6eWBU7*DJUD z3Gw0PJrfnT<<7Xi=TB^|c(d+et@We{`78d!O%oMkhuZIv&uZlCa?^8L-jszIs%7(o zn%Ypt$SK>kr>x1g*&tV@TZFOK)<@1FHz>}ynrN<}k80#yIyqOaqTHBYsCf^VuhkVt z-Nx5(_vD!6j9+ulm}SpkS*PqWzTR!O=9->okKxZ1{JGU!^xF5d+vYp9)N5|DHJ?zV z?n1ie^QO-wblqzCBO3X# zMm7gn(Vef>k6DeT<$Epm(XCtF{6g!bHSaE%Z&PIk{Z!kWO%2KQ3=&lyen=-zkSnS* z>fd?(8(NmXP^-4AMjevX^389lF5|LOmhFzS{kQD$INkC|s|}X$@X4{-CgeCRuiwoU zd57Y@pHB)P#5mhoOV*GANTG~xU{^hS(8&w+&aT!Q^{sAgVa~P6b8gcux4forRqJ#^ zt^2%_zWEQTa9j*HidZ?mjR#OXD4=*KJrs8`C~i}PVw7k_L-6#tgyBI3!%4D9a>5?3 z=CwSl?AGfIsMv)C&uvl^s4g{Cr@lz&IH445K8fA7H1fs*xk}FM)@o15rPF30VG3%N!YsWop^K0q1FdxlrcurZ73x=WaAOYt=-8t)+Hl%W( zj~3{UDpT}FL->Cf|F0q+)XC*?b7GiTC#tPE&K19@x-B>j*wH81A~uEHHXmJfWN&bG zWkQyGQYV*4%sT88g6nrw;kq&km*B@Zx3itt_TJaEd_q%LCEA*gUm8fCZEtd3cU^;=T)qjY+18auU0W%lM|BM%(G*Y*s$Hlc$z@W81%4q>=Fq)E6}qiaCX^O(XM)ht0{}m{>;N8cZc&ux z^@IW#ljkq$#Oekn7dyd-?^H!|ybqd5!Cr}Q#$@|jddFaz$Cb82e_RT+3MfHX# zstLgs1{1%{CH@v%`_X0R+NV1E6uDNIM622T0Mh?Uo!F9L1B5%`$vGC+0ab8siGthX z9FIU8`!|8<**3qj>_8uNo~;OU@~}6cF>onk8lz)&K2#uAbl-rm&hX7|sP4dlt~-!W z7u~iX_(v;v526}H>_N=*|B_C&$UTT1>(-3tS1XLj2Zqi#Co zqqXZHL^{D8|PNx$_Vgu z=^fuX?$#DP^O=eyikmBQc6LFLLHImUlq5x+DqShb=vtAK2+s;5;1P|COA0znl;E#0 z9}CU|NI+EhKL7bj?w-aAIp+colJNm5rPTY{{LBF7cu zWF?gw<2er8?`!1x2$cz@TY^kak<2YU$P8<>5S`!FIzIV07rM1Z-8278)d{_UQHPn^m_Tp)z|WH5S(r)iBvFG-9%)bd8bO-&Cc4 zsTi3(NMMMlYWF4LWB()fajD>#{Z&;Q`US`AcIpj2OjIqXK5?<*h2a&9WkV`lDLEKpeaa@?rJh+mES2pKnNO zMS&kx9^J!PpF^T213#`vy2P?s}*@{6dbHE@L zuF>zxN~&T6%N#U<1zKOBk*m8hXrH)Da=i51mf^kZ={P)3kvrkc1fHScfE`w$Ct4Z_&uECAg-4V8d1nS(&LqRzU6L@C$b5 zqa_Ga^k|y>;#HvMts1$uvx_WwHWU$Ef+odal{rAL3M3*Fd0mIZ1fwlMqhh4W93)Z& zB2g#A-Qw{Bm4&J7|5kMe_Y6}F68TW}_8Xb}UNpM(PpAxIn|Hu#9`knBYW5ctp`c|) z<|vQa(cA}GPUE!g=caZ89x_)Ukp&6X$_51LWRU+ds?&lQUNZJ#X~fFq@w9B=ZV|UK zL@||o1#HC|PuX)=DQabo;oCMWd=}rgTkW>}()32t5tER@-1sd0a!ffiu#sqU#`%sx z%fMy<#1Q>(3?B;QUl6IqK5w0;+dO44I5-wAPY|>}t%}01Q2^XswtgY?*6%av5ZQyB zztY%6yz45%dv2oyMs3SS3^3^&P^Iw@w()G=kzL33{TgihsnpB9%^c<%p21?ClbK(1 zpNGiHxGN%x0OND3>RYTv%Bp$ptPO@xLx64foOzjCK4o_l7BLuh$fCKOZTki3x&JP@ z{d2jo{jg;FRRqycdWns~!JO0kXhFf#+}0c}v8qroW+)QvLDM+JE9PukCdbM}+xGKP z^T8b^DmRZ7kB%RG@Tju#3&#afCO4uH#58QC+v8^*13+p-W%n{ue@h2~P-&bt5R{V| z$aX5C+>HDX)w5lZW7Y!Kn*n~xDK7ND>}@|(Q^S_o2>1%TI3X$N%`R4B)|zRckWQSO zuUB;c7=)b}^vjIA-AS7vnek(5At&67iz-aJe7ZyZ^wkC8RYSZNH}xl>MRe??U( ziQ{-_!Ce01`2{-J7R3ZhZZs>^y4w^fuB?o@-Mum-L zs?Y=3nx~c-7c8z|V$5Q{PgU5&i489gH(0_ac2e4Kl+&_zoZtf#W*+1xcpi2PHC%_Y 
zWw67B2GC#C$yA8jQUh)AE8!y<;fAZGV;}Z|`Q7Oc$Xy&}#oVwwK>ZI* zP|Tpb{qeef*g`ba-h2Ijt-5|@KBrpx0P2}usp!5d$_V*7^8K+~aYcqhgu2^MLWEu; zUbzapr^2-}@18>2OpM#Vr-}e&dzu(pNzK|*=2^;1Ll=|q@ykwGVaZwZi$2H0kUn2QS#N7>=3_+dq%3?=HfQj6s@J(yjT8=wwN5&FPq z`60Bql1mXu`$g4qMYW{;N7Gv*FVdvfmM#8_W$+`f{TvAz=hq@wLg`%YndxkwNX-ui zBB*3$3FMZXA6*?cQj342x<(;FL2IKyic&hfu6oG+8}jt!dTOrU+RFd1wS^W(RBewG=?%w4J7;W)Tc2#c`_bSe@7P@tu^+KDZley$PIhj!BS4DQ~kzPhi?g z$>}kLlW7VJ%qC!~U{lN`SFx>lSaB7I_)yyA=|h*oTE?-Nd~%t@D{9@quHq4h;?or8 z#i87pHxOKkAUrv8(!CE<{T7UZkPK1Vsai(?akWxZ?KwdW)vyUi3;*PlF+YXSOsZbd zJ|5A*3jegMg?~TN$OgH_P+AA`4ir^yy7HFStNhbGnOaXBq$h5cPF8NL^UaTOw~s7X z#Pa!yl`EzYDWZJatIE2)tV@n2-;uYVou6Ft62(i!pDH?p%+p-F1k)#NfF(yWb-AjT zXo-Z}p}3|As!WQ?B@#=!$p^@s&J*5DK^^L6rX>;*xRCnlV5VFmk+!G_83kv8F>|eM zq9qa$6opRgz^Ggzk-ETXMdBD!w7_wb#IawoQ}IozS~dn}?_#F`N_O(S#%1%r^TH9fm`HW#BUB=%BYIUhFtnG zs!ye7{lgtEYlf_f)6*AFqN-R@Rb0N3D%uw?E49m5RfWjtHc4)rW&4!w6i;Vi75V~7 zk`f+G-tp1xbSNxl>@L%-wu<%zloW){w0?NFis#$_dis@YjTRGyT}T`_MMW*1WzqN+i9w^qJ04+NnM$239^=_)Qa{6j8K_ix+Tb5rfRDn zXygJ(XXK3@b-(#K7CRcJ&2}f-_QUB9(xc{TGK{bjewBvj6%~|$+8USD)*#P*M%G-E zk*lbYx9FWihG!YJ@EDJUw_2L)ttI+C4-#6?hV%Z#1|ByUv%Ck&-a-qhBo#c`Q*Jc9 zqHwH5QNpdmO!l>AOXE&Z%~?h%_v7JW>X@yu98WNWR{B&79;w~ zl}>)9kta!|5kI=PyEP2Y=Tzajq@zfZS-JN8sbTpwii5LTZIg^OLMK6exSgn*2-9|6 z^eXOA8{Z^Y<%6^YlST2rh<6A%v|CvQf@o&yD5sCGY$t89cxE!B5EBx|i(HP#lX8^3 z$8ptw_$YwMoo3OD>Ae7bH)qK2{_X z3OPGL*fgi#JdW}_OrlYfKr?Nz7(UN@uk~zm4k^MFnx|qTw}>BBe7X}dJy0@y;+Du` zUyfpGItLLskaSY1wA#S4MaH80)<2)hQ9Pbm>8y}5%=%qTdX43JXQzMr~;SF82`4aJNt&;BIOT4H@T2=83W>x){CL^|TbJ?QB&7H6MZ!@iW zL$B1TWnFgEB~RX7kXJsr>?QK6oS&%1M3^SQml?_gW%{K;6<3nq?Oyz0Cm@v1CDDs{ z+KW=2=|9W#V(Q0&5Ut#$xaJ8GU7S~B-k&3do z0pizrGH9+@R)3|(55TaF>_GXuxj>l^cDPE|qb4g8%b4N?mLgyyxw2orm80A|GSIqI zs?o-x`!i_86`gJMH9c;K_Q<(%-kS?&q6C`4*u#ox7gug-P%PCP9g{`LLX1hna?kRv z9M!f5%p#;;*ewQ0=pkge@fCi!b_GKllm8zCzIF+1kP#F=+Zn*VgAV@cHaA0fu7%vk;&iG6n?D|C}Gtz03g{DuluvQz6ovZ5LN+#!pm% zh>V}uL#!_^$OuA$^%HuP9`JIzMMOCPW677wfO%a3+n?K*&9VxF&E33&7k}3t72+~#vEf?j6-(YJ@9co znj6K(vQWIai?5^?$+Xu&6y(_2Lj4XB$v%!^uLG)6TzjKTgo@2*Qz(BOy|<%uCETM) zat>xeZB&xOCt_6mHhfl5DnbWdpX4m(NES*gK}nZwTnl6+ja%GYrop#dR zp^<%2(qbDNVyXVAJkGPS!1x3Tqi}!6D6rd<$B^E(D@tgB$CjW^F_DDx|M?;M1kW+D z-l>t@QL?%+-4dkDWP$FA4rxj4C*DQ{fN17l>OyIP36~(XoQ2d)2~qW2GHcKoHC(D(X?DAJEGgOPd0?L z#F}}^7H!cONnvxor@DT$c*a6VVHWvG$j_*cH(;pmY!_db-o;#_DO*P|3Np4Z3{Tlo zZT52+qsQ3bTuAV)CWe!lN}voGIjafMwnSZ&&y{N3a9y zoy)%;rPoKz84Qf|WQ4WNeb&*VK?B`P=}E})E6NVqXrc>7LBi5emY#$bJfdfb1fwGp zKJN)u0q3+l2N54NM8F`kjRyLcxlQad`@D*eNx|u4S0E!gZA_wSUO-l3&~c1O2>l=i zq~fI5$HSo9(r8F-!N?4$Bk|x~JjSltkiir|_2Y4Odu0*1gk|)63acm8_8O5}oy)n= z*0w>Fe?CZ8mOI~VV$K3Id9l;Q)puT|$_p`adfxX_M7iW;6HCF>=L*gMQx^Zu$ZjmQ zA}vFvU_z;}@X^$qxx-MJf&8OJCc7-b>YEcYrQD*A_Ys;+y{zkP_8W?eL!#bQ-4^F1 z+qz8QX%Hm@S@OXE9S5mGRb0+}#JqpeYo464xGp#h1zB!c<(j+bd!+-!S!g!y#ZUYR z@!{q@6SSq`jO%;;#O8`O>n_$>PnxJS%8S$t@h5IFbaIxto~%O)>5(Hx>h8RE{UGFKBT{=RZz1iuqe7 z8ff(#TB=`qM)o=Jss&6T8sT0?&^^%5%6#4hvR=%4CwlC85?!?!&o9r3L%ZG_wsG9p z=2lM3m1GkbG9YP)lEnWxwqg|h=pNSC(fLBilAzMH!ZcbmThwh<>&K9KZq*lSHO1`_ zG~%g|UAxR7I*>WaX|AUjNk&dI2e z%LEk%XJ^q?(sJt+YkzgVD#g(+-K;ddZs~@;xnQnkD=+FJg{yVaCY!}C@J=s5(_INFNrGwwbusLah{7|!(2`Bz zOHa)K7%bnSnV`0qEx!8P=bNVU!IIh(UEV}&VsvI(^!(C74oe(%zvMEkkiBV?8C>sJrt4s9j8iIs@2tQDQ zIDgV(RgeVNtcvX*1XxIl5a1bu39vEZtfJs)*BWa|tAU@PPhKs~94X8~s%J*^DMCrc z|04htg-#$`A0@Q5M?zX&Z2Bl}n)h0Bjf%J+N`s~J?`rg~r*;Pk**#R?N2XR8L-w9Y z+ZdTz{Q)#%2|-sXmL~2^6Tz` zKi6oL9#h-P{f_EOse26Z%-T*`(FGsUwQcjZ?)yDOb`(`E33~v=mvwUEU{KJ)Mzo)# zh814*pa*UVE~MNAb^lbJ$W^1dNFLz29hDt@>P9K?s+f|`=;W5c5M@>xnvQ$-FQR`Z 
z&chR76Ce51D&TfZZJ4bPjs2xO@q9MJvvXw_hJ9X$fvJ`7q{gBh6tCwfPUnf@d<;dMp~#F&7;_0YpTc-k5mLzd4|Q@^H&Dl@z6(^{82ZR| z#c+&a92b6h6Mdx`z)|~}U-;-X2NyzZyOFQ@jKhs*8AgZXsh?B@S>b-HKz5~pJAr6m zKs-`V68#vaUG0fnk4aW@@CHN$sjOYMdixVr~q! zv_|02i!UvDoma(@`q9|p*u@zu^tv0V>|~)Xa%t4;QUa6PqnPGo;EIWb-UB8nJzZ&r zON~o9o}gr408<|$7YeI(Oc{V)9a0{ zd)*?JCiXfy{h=JDUpI{#rYxbv@?W9_4wmT7N2hjknEK-BjYc0f!1K%UE*|59C@Fb3 z-7c9njebn==Ng$3n-MmsxXpT^*}#pd+^Err0ekGIZbeDN%S|eV{(2aw*PuvZh6$>6eCCQmdo(C z&as`+=}vJD(aCl8PDKU~LlknM4KY4OTcJbS(2C(7&3IFGP7ij+*pDb)R!$E_qwHeB#!m{Wc0h7;)sd15R~;#^ ze1bj6!obNDh8Wm*hPu&{TySV$OUp8J5`pm0^ytyaM~282DHkwUrG;u$XRQ++#Z*h6c9iY&v<&AUh=I1c^K{0_wx-U#ZY`rraRMWd@OYelAz0-)Lmn>tNM^VAH+M zpc|7Sm~p@WzF=Q64!B)vZ^_=tTU-_Zu93sDd#Jo5DqlIak-N;NqnWPag%#aD(VBiV zV!Tq*i`_XgZ1WDe$_U>muhPk~%I&nrDP83++YRpm>5l2t<&oh$<ekkyd^2zd8gPbIE5Eq!kJk!l5fG9G27@|zW2jVM-4>^yA z4fUH(K(;g^vfrTsrpxun)LsAl|J*_I06l-pDQt;&Z%QjVcSI%i-O z2X&HA9Qy_1r#Y@%^*~LsdY1i8juzkVVe%d1Q%Q}yN2K_QTKySs&OPq=R;_xUXUV_g z=@ZlM$v5PZxbmR`iIXuXluPVS^0JmipaWhGMhoo&crPypaWH&HP?$U8ts`VoZt}ir zjXu)S^!$ZJ-4_)wKFSF#zxo`{s-pTI(cP_$h3W6-6HyGP%IGHu#8#^7E=-f*j0}V& zGQ9E)dXfwk>Ej+E-jU}qrL3ote3*r+llu|k>MUw2NoHtZaZG(l`K(Toomp829Fxm| z7syNWz{_w=NwRhvP@TMJh}Md8^^uM5ft(r^IAu$$UG1y12Q>S zUc&hFr%(H#kU_qaT;+695b}s62K$ypsFGBMwrV+!if^L$Me@PzY_B;N9NE2RPv~%< zF%(3p=UktrqFC(w;H+WYyl47#`K0v(h|-gTUnkZYYj9j@8ZwKdZq-)K=8+F;WKUmo zQSs7j(yZwKUWiw9b{6&RXhzgQxSq=$xi`Ri7~l=8nsaMc!Vm2|aPaASEfj<;)+#U@ XkdCyl;JMA|W2cKk$_T;UZ_xe^@qaSD literal 0 HcmV?d00001 diff --git a/firestore/tests/unit/v1/testdata/update-all-transforms.textproto b/firestore/tests/unit/v1/testdata/update-all-transforms.textproto new file mode 100644 index 000000000000..225cc61e405e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-all-transforms.textproto @@ -0,0 +1,67 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can be created with any amount of transforms. + +description: "update: all transforms in a single call" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-alone.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-alone.textproto new file mode 100644 index 000000000000..8c79a31d5052 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-alone.textproto @@ -0,0 +1,36 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ArrayRemove, then no update operation should +# be produced. + +description: "update: ArrayRemove alone" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayRemove\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-multi.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-multi.textproto new file mode 100644 index 000000000000..2362b6e09458 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-multi.textproto @@ -0,0 +1,69 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayRemove field. Since all the ArrayRemove +# fields are removed, the only field in the update is "a". + +# b is not in the mask because it will be set in the transform. c must be in the +# mask: it should be replaced entirely. The transform will set c.d to the +# timestamp, but the update will delete the rest of c. + +description: "update: multiple ArrayRemove fields" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-nested.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-nested.textproto new file mode 100644 index 000000000000..143790179eaf --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-nested.textproto @@ -0,0 +1,52 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayRemove value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. 
+ +description: "update: nested ArrayRemove field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-noarray-nested.textproto new file mode 100644 index 000000000000..04eca965c688 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayRemove. Firestore transforms don't support array indexing. + +description: "update: ArrayRemove cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-noarray.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-noarray.textproto new file mode 100644 index 000000000000..bbd27bf017e1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "update: ArrayRemove cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove-with-st.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove-with-st.textproto new file mode 100644 index 000000000000..4888b44f1c01 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. 
+ +description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayremove.textproto b/firestore/tests/unit/v1/testdata/update-arrayremove.textproto new file mode 100644 index 000000000000..3b767cf486c3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayremove.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayRemove is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. + +description: "update: ArrayRemove with data" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-alone.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-alone.textproto new file mode 100644 index 000000000000..ec12818da74c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-alone.textproto @@ -0,0 +1,36 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ArrayUnion, then no update operation should +# be produced. + +description: "update: ArrayUnion alone" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayUnion\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-multi.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-multi.textproto new file mode 100644 index 000000000000..8edf6a3af046 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-multi.textproto @@ -0,0 +1,69 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayUnion field. Since all the ArrayUnion +# fields are removed, the only field in the update is "a". + +# b is not in the mask because it will be set in the transform. c must be in the +# mask: it should be replaced entirely. 
The transform will set c.d to the +# timestamp, but the update will delete the rest of c. + +description: "update: multiple ArrayUnion fields" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + append_missing_elements: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-nested.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-nested.textproto new file mode 100644 index 000000000000..217e2e2ca775 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-nested.textproto @@ -0,0 +1,52 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayUnion value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "update: nested ArrayUnion field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-noarray-nested.textproto new file mode 100644 index 000000000000..0326781830ec --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayUnion. Firestore transforms don't support array indexing. 
+ +description: "update: ArrayUnion cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-noarray.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-noarray.textproto new file mode 100644 index 000000000000..c199f9f73c91 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "update: ArrayUnion cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion-with-st.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion-with-st.textproto new file mode 100644 index 000000000000..ee022f8492bc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion-with-st.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. + +description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-arrayunion.textproto b/firestore/tests/unit/v1/testdata/update-arrayunion.textproto new file mode 100644 index 000000000000..81b240b891bb --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-arrayunion.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayUnion is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. 
+ +description: "update: ArrayUnion with data" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-badchar.textproto b/firestore/tests/unit/v1/testdata/update-badchar.textproto new file mode 100644 index 000000000000..656ff53b686a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-badchar.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The keys of the data given to Update are interpreted, unlike those of Create and +# Set. They cannot contain special characters. + +description: "update: invalid character" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a~b\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-basic.textproto b/firestore/tests/unit/v1/testdata/update-basic.textproto new file mode 100644 index 000000000000..9da316f58ebe --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-basic.textproto @@ -0,0 +1,30 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. + +description: "update: basic" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-complex.textproto b/firestore/tests/unit/v1/testdata/update-complex.textproto new file mode 100644 index 000000000000..1a6d9eff64b9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-complex.textproto @@ -0,0 +1,65 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. 
+ +description: "update: complex" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-del-alone.textproto b/firestore/tests/unit/v1/testdata/update-del-alone.textproto new file mode 100644 index 000000000000..8f558233f037 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del-alone.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the input data consists solely of Deletes, then the update operation has no +# map, just an update mask. + +description: "update: Delete alone" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"Delete\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-del-dot.textproto b/firestore/tests/unit/v1/testdata/update-del-dot.textproto new file mode 100644 index 000000000000..c0ebdf61f787 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del-dot.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# After expanding top-level dotted fields, fields with Delete values are pruned +# from the output data, but appear in the update mask. + +description: "update: Delete with a dotted field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b.c\": \"Delete\", \"b.d\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "d" + value: < + integer_value: 2 + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + field_paths: "b.d" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-del-nested.textproto b/firestore/tests/unit/v1/testdata/update-del-nested.textproto new file mode 100644 index 000000000000..ed102697e682 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del-nested.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a top-level key. + +description: "update: Delete cannot be nested" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": \"Delete\"}}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-del-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-del-noarray-nested.textproto new file mode 100644 index 000000000000..a2eec49661c0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update: Delete cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-del-noarray.textproto b/firestore/tests/unit/v1/testdata/update-del-noarray.textproto new file mode 100644 index 000000000000..a7eea87ef49f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update: Delete cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-del.textproto b/firestore/tests/unit/v1/testdata/update-del.textproto new file mode 100644 index 000000000000..ec443e6c7035 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-del.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field's value is the Delete sentinel, then it doesn't appear in the update +# data, but does in the mask. + +description: "update: Delete" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-exists-precond.textproto b/firestore/tests/unit/v1/testdata/update-exists-precond.textproto new file mode 100644 index 000000000000..3c6fef4e2263 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-exists-precond.textproto @@ -0,0 +1,14 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method does not support an explicit exists precondition. + +description: "update: Exists precondition is invalid" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + json_data: "{\"a\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-fp-empty-component.textproto b/firestore/tests/unit/v1/testdata/update-fp-empty-component.textproto new file mode 100644 index 000000000000..c3bceff3e4b8 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-fp-empty-component.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Empty fields are not allowed. + +description: "update: empty field path component" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a..b\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-no-paths.textproto b/firestore/tests/unit/v1/testdata/update-no-paths.textproto new file mode 100644 index 000000000000..b524b7483f79 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-no-paths.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# It is a client-side error to call Update with empty data. + +description: "update: no paths" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-all-transforms.textproto b/firestore/tests/unit/v1/testdata/update-paths-all-transforms.textproto new file mode 100644 index 000000000000..8cfad4732034 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-all-transforms.textproto @@ -0,0 +1,82 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can be created with any amount of transforms. 
+ +description: "update-paths: all transforms in a single call" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "c" + > + field_paths: < + field: "d" + > + json_values: "1" + json_values: "\"ServerTimestamp\"" + json_values: "[\"ArrayUnion\", 1, 2, 3]" + json_values: "[\"ArrayRemove\", 4, 5, 6]" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-alone.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-alone.textproto new file mode 100644 index 000000000000..68f0e147b2de --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-alone.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ArrayRemove, then no update operation should +# be produced. + +description: "update-paths: ArrayRemove alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[\"ArrayRemove\", 1, 2, 3]" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-multi.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-multi.textproto new file mode 100644 index 000000000000..b60c3f36a6c0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-multi.textproto @@ -0,0 +1,76 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayRemove field. Since all the ArrayRemove +# fields are removed, the only field in the update is "a". 
+ +description: "update-paths: multiple ArrayRemove fields" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "c" + > + json_values: "1" + json_values: "[\"ArrayRemove\", 1, 2, 3]" + json_values: "{\"d\": [\"ArrayRemove\", 4, 5, 6]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + remove_all_from_array: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-nested.textproto new file mode 100644 index 000000000000..381be19d553f --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-nested.textproto @@ -0,0 +1,59 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayRemove value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "update-paths: nested ArrayRemove field" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "{\"c\": [\"ArrayRemove\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray-nested.textproto new file mode 100644 index 000000000000..35f6c67b2e56 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray-nested.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayRemove. Firestore transforms don't support array indexing. 
+ +description: "update-paths: ArrayRemove cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray.textproto new file mode 100644 index 000000000000..45cab48dd9e1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-noarray.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayRemove must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "update-paths: ArrayRemove cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove-with-st.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-with-st.textproto new file mode 100644 index 000000000000..67b92a3ef3b9 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove-with-st.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. + +description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[\"ArrayRemove\", 1, \"ServerTimestamp\", 3]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayremove.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayremove.textproto new file mode 100644 index 000000000000..d3866676ede0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayremove.textproto @@ -0,0 +1,57 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayRemove is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. 
+ +description: "update-paths: ArrayRemove with data" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "[\"ArrayRemove\", 1, 2, 3]" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + remove_all_from_array: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-alone.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-alone.textproto new file mode 100644 index 000000000000..48100e0abceb --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-alone.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ArrayUnion, then no update operation should +# be produced. + +description: "update-paths: ArrayUnion alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[\"ArrayUnion\", 1, 2, 3]" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-multi.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-multi.textproto new file mode 100644 index 000000000000..03772e5ddd1a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-multi.textproto @@ -0,0 +1,76 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ArrayUnion field. Since all the ArrayUnion +# fields are removed, the only field in the update is "a". 
+ +description: "update-paths: multiple ArrayUnion fields" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "c" + > + json_values: "1" + json_values: "[\"ArrayUnion\", 1, 2, 3]" + json_values: "{\"d\": [\"ArrayUnion\", 4, 5, 6]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + field_transforms: < + field_path: "c.d" + append_missing_elements: < + values: < + integer_value: 4 + > + values: < + integer_value: 5 + > + values: < + integer_value: 6 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-nested.textproto new file mode 100644 index 000000000000..1420e4e2806b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-nested.textproto @@ -0,0 +1,59 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ArrayUnion value can occur at any depth. In this case, the transform applies +# to the field path "b.c". Since "c" is removed from the update, "b" becomes +# empty, so it is also removed from the update. + +description: "update-paths: nested ArrayUnion field" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "{\"c\": [\"ArrayUnion\", 1, 2, 3]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray-nested.textproto new file mode 100644 index 000000000000..ab75bf38a3ae --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray-nested.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ArrayUnion. Firestore transforms don't support array indexing. 
+ +description: "update-paths: ArrayUnion cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray.textproto new file mode 100644 index 000000000000..fac72644fc38 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-noarray.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# ArrayUnion must be the value of a field. Firestore transforms don't support +# array indexing. + +description: "update-paths: ArrayUnion cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion-with-st.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-with-st.textproto new file mode 100644 index 000000000000..d194c09bd775 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion-with-st.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. It may not appear in +# an ArrayUnion. + +description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[\"ArrayUnion\", 1, \"ServerTimestamp\", 3]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-arrayunion.textproto b/firestore/tests/unit/v1/testdata/update-paths-arrayunion.textproto new file mode 100644 index 000000000000..fc56c1e29471 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-arrayunion.textproto @@ -0,0 +1,57 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with ArrayUnion is removed from the data in the update operation. Instead +# it appears in a separate Transform operation. 
+ +description: "update-paths: ArrayUnion with data" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "[\"ArrayUnion\", 1, 2, 3]" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + append_missing_elements: < + values: < + integer_value: 1 + > + values: < + integer_value: 2 + > + values: < + integer_value: 3 + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-basic.textproto b/firestore/tests/unit/v1/testdata/update-paths-basic.textproto new file mode 100644 index 000000000000..515f29d6af02 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-basic.textproto @@ -0,0 +1,33 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. + +description: "update-paths: basic" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-complex.textproto b/firestore/tests/unit/v1/testdata/update-paths-complex.textproto new file mode 100644 index 000000000000..38a832239f5c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-complex.textproto @@ -0,0 +1,72 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. 
+ +description: "update-paths: complex" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "[1, 2.5]" + json_values: "{\"c\": [\"three\", {\"d\": true}]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-del-alone.textproto b/firestore/tests/unit/v1/testdata/update-paths-del-alone.textproto new file mode 100644 index 000000000000..5dbb787de94b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-del-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the input data consists solely of Deletes, then the update operation has no +# map, just an update mask. + +description: "update-paths: Delete alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-del-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-del-nested.textproto new file mode 100644 index 000000000000..bdf65fb0ad91 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-del-nested.textproto @@ -0,0 +1,14 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a top-level key. + +description: "update-paths: Delete cannot be nested" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "{\"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-del-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-del-noarray-nested.textproto new file mode 100644 index 000000000000..d3da15dda80e --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-del-noarray-nested.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. 
+ +description: "update-paths: Delete cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": \"Delete\"}]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-del-noarray.textproto b/firestore/tests/unit/v1/testdata/update-paths-del-noarray.textproto new file mode 100644 index 000000000000..9ebdd0945198 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-del-noarray.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update-paths: Delete cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, \"Delete\"]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-del.textproto b/firestore/tests/unit/v1/testdata/update-paths-del.textproto new file mode 100644 index 000000000000..5197a78488f0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-del.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field's value is the Delete sentinel, then it doesn't appear in the update +# data, but does in the mask. + +description: "update-paths: Delete" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-exists-precond.textproto b/firestore/tests/unit/v1/testdata/update-paths-exists-precond.textproto new file mode 100644 index 000000000000..084e07726ee0 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-exists-precond.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method does not support an explicit exists precondition. + +description: "update-paths: Exists precondition is invalid" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + field_paths: < + field: "a" + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-del.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-del.textproto new file mode 100644 index 000000000000..5c92aeb8ca8b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-del.textproto @@ -0,0 +1,47 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If one nested field is deleted, and another isn't, preserve the second. + +description: "update-paths: field paths with delete" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "foo" + field: "bar" + > + field_paths: < + field: "foo" + field: "delete" + > + json_values: "1" + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "foo" + value: < + map_value: < + fields: < + key: "bar" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "foo.bar" + field_paths: "foo.delete" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-dup-transforms.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-dup-transforms.textproto new file mode 100644 index 000000000000..a84725a8d4d1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-dup-transforms.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The same field cannot occur more than once, even if all the operations are +# transforms. + +description: "update-paths: duplicate field path with only transforms" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "a" + > + json_values: "[\"ArrayUnion\", 1, 2, 3]" + json_values: "\"ServerTimestamp\"" + json_values: "[\"ArrayUnion\", 4, 5, 6]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-dup.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-dup.textproto new file mode 100644 index 000000000000..fedbd3aab99d --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-dup.textproto @@ -0,0 +1,22 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The same field cannot occur more than once. + +description: "update-paths: duplicate field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "a" + > + json_values: "1" + json_values: "2" + json_values: "3" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-empty-component.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-empty-component.textproto new file mode 100644 index 000000000000..7a5df25b7ed2 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-empty-component.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Empty fields are not allowed. 
+ +description: "update-paths: empty field path component" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "*" + field: "" + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-empty.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-empty.textproto new file mode 100644 index 000000000000..311e309326d1 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-empty.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A FieldPath of length zero is invalid. + +description: "update-paths: empty field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-multi.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-multi.textproto new file mode 100644 index 000000000000..9ba41e39812c --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-multi.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The UpdatePaths or equivalent method takes a list of FieldPaths. Each FieldPath +# is a sequence of uninterpreted path components. + +description: "update-paths: multiple-element field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + field: "b" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "a.b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-fp-nosplit.textproto b/firestore/tests/unit/v1/testdata/update-paths-fp-nosplit.textproto new file mode 100644 index 000000000000..516495266707 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-fp-nosplit.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# FieldPath components are not split on dots. 
+ +description: "update-paths: FieldPath elements are not split on dots" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a.b" + field: "f.g" + > + json_values: "{\"n.o\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "f.g" + value: < + map_value: < + fields: < + key: "n.o" + value: < + integer_value: 7 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "`a.b`.`f.g`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-no-paths.textproto b/firestore/tests/unit/v1/testdata/update-paths-no-paths.textproto new file mode 100644 index 000000000000..d9939dc94701 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-no-paths.textproto @@ -0,0 +1,10 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# It is a client-side error to call Update with empty data. + +description: "update-paths: no paths" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-prefix-1.textproto b/firestore/tests/unit/v1/testdata/update-paths-prefix-1.textproto new file mode 100644 index 000000000000..1710b91097e3 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-prefix-1.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update-paths: prefix #1" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + field: "b" + > + field_paths: < + field: "a" + > + json_values: "1" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-prefix-2.textproto b/firestore/tests/unit/v1/testdata/update-paths-prefix-2.textproto new file mode 100644 index 000000000000..be78ab58a63b --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-prefix-2.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update-paths: prefix #2" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "a" + field: "b" + > + json_values: "1" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-prefix-3.textproto b/firestore/tests/unit/v1/testdata/update-paths-prefix-3.textproto new file mode 100644 index 000000000000..b8a84c9d1f80 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-prefix-3.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# In the input data, one field cannot be a prefix of another, even if the values +# could in principle be combined. + +description: "update-paths: prefix #3" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "a" + field: "d" + > + json_values: "{\"b\": 1}" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-special-chars.textproto b/firestore/tests/unit/v1/testdata/update-paths-special-chars.textproto new file mode 100644 index 000000000000..51cb33b31268 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-special-chars.textproto @@ -0,0 +1,53 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# FieldPaths can contain special characters. + +description: "update-paths: special characters" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "*" + field: "~" + > + field_paths: < + field: "*" + field: "`" + > + json_values: "1" + json_values: "2" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "`" + value: < + integer_value: 2 + > + > + fields: < + key: "~" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "`*`.`\\``" + field_paths: "`*`.`~`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-alone.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-alone.textproto new file mode 100644 index 000000000000..abc44f55b463 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-alone.textproto @@ -0,0 +1,29 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "update-paths: ServerTimestamp alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "\"ServerTimestamp\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-multi.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-multi.textproto new file mode 100644 index 000000000000..b0b7df17d836 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-multi.textproto @@ -0,0 +1,56 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". 
+ +description: "update-paths: multiple ServerTimestamp fields" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "c" + > + json_values: "1" + json_values: "\"ServerTimestamp\"" + json_values: "{\"d\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-nested.textproto new file mode 100644 index 000000000000..3077368318e8 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-nested.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "update-paths: nested ServerTimestamp field" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "{\"c\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-noarray-nested.textproto new file mode 100644 index 000000000000..2c2cb89b62f4 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-noarray-nested.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. 
+ +description: "update-paths: ServerTimestamp cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": \"ServerTimestamp\"}]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-noarray.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-noarray.textproto new file mode 100644 index 000000000000..a2baa66f5762 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-noarray.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "update-paths: ServerTimestamp cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, \"ServerTimestamp\"]" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st-with-empty-map.textproto b/firestore/tests/unit/v1/testdata/update-paths-st-with-empty-map.textproto new file mode 100644 index 000000000000..a54a241565de --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st-with-empty-map.textproto @@ -0,0 +1,51 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp +# should be stripped out but the empty map should remain. + +description: "update-paths: ServerTimestamp beside an empty map" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "{\"b\": {}, \"c\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-st.textproto b/firestore/tests/unit/v1/testdata/update-paths-st.textproto new file mode 100644 index 000000000000..40634c165864 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-st.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. 
+ +description: "update-paths: ServerTimestamp with data" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "\"ServerTimestamp\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-paths-uptime.textproto b/firestore/tests/unit/v1/testdata/update-paths-uptime.textproto new file mode 100644 index 000000000000..7a15874bea64 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-paths-uptime.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update call supports a last-update-time precondition. + +description: "update-paths: last-update-time precondition" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + field_paths: < + field: "a" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-prefix-1.textproto b/firestore/tests/unit/v1/testdata/update-prefix-1.textproto new file mode 100644 index 000000000000..e5c895e73b49 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-prefix-1.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update: prefix #1" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a.b\": 1, \"a\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-prefix-2.textproto b/firestore/tests/unit/v1/testdata/update-prefix-2.textproto new file mode 100644 index 000000000000..4870176186a7 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-prefix-2.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. 
+ +description: "update: prefix #2" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"a.b\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-prefix-3.textproto b/firestore/tests/unit/v1/testdata/update-prefix-3.textproto new file mode 100644 index 000000000000..0c03b0d6b845 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-prefix-3.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another, even if the values +# could in principle be combined. + +description: "update: prefix #3" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": 1}, \"a.d\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-quoting.textproto b/firestore/tests/unit/v1/testdata/update-quoting.textproto new file mode 100644 index 000000000000..20e530a7609a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-quoting.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In a field path, any component beginning with a non-letter or underscore is +# quoted. + +description: "update: non-letter starting chars are quoted, except underscore" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"_0.1.+2\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "_0" + value: < + map_value: < + fields: < + key: "1" + value: < + map_value: < + fields: < + key: "+2" + value: < + integer_value: 1 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "_0.`1`.`+2`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-split-top-level.textproto b/firestore/tests/unit/v1/testdata/update-split-top-level.textproto new file mode 100644 index 000000000000..d1b0ca0da163 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-split-top-level.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method splits only top-level keys at dots. Keys at other levels are +# taken literally. 
+ +description: "update: Split on dots for top-level keys only" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"h.g\": {\"j.k\": 6}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "g" + value: < + map_value: < + fields: < + key: "j.k" + value: < + integer_value: 6 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "h.g" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-split.textproto b/firestore/tests/unit/v1/testdata/update-split.textproto new file mode 100644 index 000000000000..b96fd6a4f70a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-split.textproto @@ -0,0 +1,44 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method splits top-level keys at dots. + +description: "update: split on dots" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a.b.c\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + integer_value: 1 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a.b.c" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st-alone.textproto b/firestore/tests/unit/v1/testdata/update-st-alone.textproto new file mode 100644 index 000000000000..0d5ab6e9fbaf --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-alone.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "update: ServerTimestamp alone" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st-dot.textproto b/firestore/tests/unit/v1/testdata/update-st-dot.textproto new file mode 100644 index 000000000000..19d4d18432e7 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-dot.textproto @@ -0,0 +1,27 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Like other uses of ServerTimestamp, the data is pruned and the field does not +# appear in the update mask, because it is in the transform. In this case An +# update operation is produced just to hold the precondition. 
+ +description: "update: ServerTimestamp with dotted field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a.b.c\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a.b.c" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st-multi.textproto b/firestore/tests/unit/v1/testdata/update-st-multi.textproto new file mode 100644 index 000000000000..0434cb59ab5a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-multi.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". + +# b is not in the mask because it will be set in the transform. c must be in the +# mask: it should be replaced entirely. The transform will set c.d to the +# timestamp, but the update will delete the rest of c. + +description: "update: multiple ServerTimestamp fields" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st-nested.textproto b/firestore/tests/unit/v1/testdata/update-st-nested.textproto new file mode 100644 index 000000000000..f79d9c6a072a --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-nested.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. 
+ +description: "update: nested ServerTimestamp field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st-noarray-nested.textproto b/firestore/tests/unit/v1/testdata/update-st-noarray-nested.textproto new file mode 100644 index 000000000000..2939dd646436 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "update: ServerTimestamp cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-st-noarray.textproto b/firestore/tests/unit/v1/testdata/update-st-noarray.textproto new file mode 100644 index 000000000000..f3879cdf2260 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "update: ServerTimestamp cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" + is_error: true +> diff --git a/firestore/tests/unit/v1/testdata/update-st-with-empty-map.textproto b/firestore/tests/unit/v1/testdata/update-st-with-empty-map.textproto new file mode 100644 index 000000000000..1901de2a15ef --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st-with-empty-map.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp +# should be stripped out but the empty map should remain. 
+ +description: "update: ServerTimestamp beside an empty map" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-st.textproto b/firestore/tests/unit/v1/testdata/update-st.textproto new file mode 100644 index 000000000000..12045a9220dc --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-st.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "update: ServerTimestamp with data" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/v1/testdata/update-uptime.textproto b/firestore/tests/unit/v1/testdata/update-uptime.textproto new file mode 100644 index 000000000000..66119ac61c13 --- /dev/null +++ b/firestore/tests/unit/v1/testdata/update-uptime.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update call supports a last-update-time precondition. + +description: "update: last-update-time precondition" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> From b5e9b27d1667387739c537e8a496106f84e8639a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 6 Mar 2019 15:41:28 -0500 Subject: [PATCH 5/7] Use 'v1' rather than 'v1beta1' in unversioned wrapper. 
--- firestore/google/cloud/firestore.py | 38 ++++++++++++++--------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/firestore/google/cloud/firestore.py b/firestore/google/cloud/firestore.py index 98ccb62f3416..2c47317fa593 100644 --- a/firestore/google/cloud/firestore.py +++ b/firestore/google/cloud/firestore.py @@ -15,25 +15,25 @@ """Python idiomatic client for Google Cloud Firestore.""" -from google.cloud.firestore_v1beta1 import __version__ -from google.cloud.firestore_v1beta1 import Client -from google.cloud.firestore_v1beta1 import CollectionReference -from google.cloud.firestore_v1beta1 import DELETE_FIELD -from google.cloud.firestore_v1beta1 import DocumentReference -from google.cloud.firestore_v1beta1 import DocumentSnapshot -from google.cloud.firestore_v1beta1 import enums -from google.cloud.firestore_v1beta1 import ExistsOption -from google.cloud.firestore_v1beta1 import GeoPoint -from google.cloud.firestore_v1beta1 import LastUpdateOption -from google.cloud.firestore_v1beta1 import Query -from google.cloud.firestore_v1beta1 import ReadAfterWriteError -from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP -from google.cloud.firestore_v1beta1 import Transaction -from google.cloud.firestore_v1beta1 import transactional -from google.cloud.firestore_v1beta1 import types -from google.cloud.firestore_v1beta1 import Watch -from google.cloud.firestore_v1beta1 import WriteBatch -from google.cloud.firestore_v1beta1 import WriteOption +from google.cloud.firestore_v1 import __version__ +from google.cloud.firestore_v1 import Client +from google.cloud.firestore_v1 import CollectionReference +from google.cloud.firestore_v1 import DELETE_FIELD +from google.cloud.firestore_v1 import DocumentReference +from google.cloud.firestore_v1 import DocumentSnapshot +from google.cloud.firestore_v1 import enums +from google.cloud.firestore_v1 import ExistsOption +from google.cloud.firestore_v1 import GeoPoint +from google.cloud.firestore_v1 import LastUpdateOption +from google.cloud.firestore_v1 import Query +from google.cloud.firestore_v1 import ReadAfterWriteError +from google.cloud.firestore_v1 import SERVER_TIMESTAMP +from google.cloud.firestore_v1 import Transaction +from google.cloud.firestore_v1 import transactional +from google.cloud.firestore_v1 import types +from google.cloud.firestore_v1 import Watch +from google.cloud.firestore_v1 import WriteBatch +from google.cloud.firestore_v1 import WriteOption __all__ = [ From 2688e2227c3f3be85b4fe89daa00c5e5851fdc8e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 27 Mar 2019 15:37:14 -0400 Subject: [PATCH 6/7] Re-run synth to pick up proto changes. 
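For orientation, a short sketch of how the regenerated types express the write shape that the textproto fixtures above spell out; the document path is the same placeholder the fixtures use, and the snippet is illustrative rather than part of the generated code:

    # Illustrative sketch: build an update Write with a field mask and an
    # exists precondition using the regenerated google.cloud.firestore_v1 types.
    from google.cloud.firestore_v1 import types

    doc_path = "projects/projectID/databases/(default)/documents/C/d"

    write = types.Write(
        update=types.Document(
            name=doc_path,
            fields={"a": types.Value(integer_value=1)},
        ),
        update_mask=types.DocumentMask(field_paths=["a"]),
        current_document=types.Precondition(exists=True),
    )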
--- .../cloud/firestore_v1/proto/common.proto | 13 +- .../cloud/firestore_v1/proto/common_pb2.py | 6 +- .../cloud/firestore_v1/proto/document.proto | 5 +- .../cloud/firestore_v1/proto/document_pb2.py | 6 +- .../cloud/firestore_v1/proto/firestore.proto | 77 ++++------ .../firestore_v1/proto/firestore_pb2_grpc.py | 8 +- .../cloud/firestore_v1/proto/query.proto | 15 +- .../cloud/firestore_v1/proto/query_pb2.py | 112 +++++++------- .../cloud/firestore_v1/proto/write.proto | 52 +++---- .../cloud/firestore_v1/proto/write_pb2.py | 8 +- .../firestore_v1beta1/proto/common.proto | 11 +- .../firestore_v1beta1/proto/common_pb2.py | 6 +- .../firestore_v1beta1/proto/document.proto | 5 +- .../firestore_v1beta1/proto/document_pb2.py | 6 +- .../firestore_v1beta1/proto/firestore.proto | 93 ++++-------- .../proto/firestore_pb2_grpc.py | 8 +- .../cloud/firestore_v1beta1/proto/query.proto | 33 ++-- .../firestore_v1beta1/proto/query_pb2.py | 142 +++++++++--------- .../cloud/firestore_v1beta1/proto/write.proto | 52 +++---- .../firestore_v1beta1/proto/write_pb2.py | 6 +- firestore/synth.metadata | 12 +- 21 files changed, 302 insertions(+), 374 deletions(-) diff --git a/firestore/google/cloud/firestore_v1/proto/common.proto b/firestore/google/cloud/firestore_v1/proto/common.proto index 9a0ae7deae84..59c62997ad0d 100644 --- a/firestore/google/cloud/firestore_v1/proto/common.proto +++ b/firestore/google/cloud/firestore_v1/proto/common.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,8 +17,8 @@ syntax = "proto3"; package google.firestore.v1; -import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option csharp_namespace = "Google.Cloud.Firestore.V1"; option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore"; @@ -28,16 +28,15 @@ option java_package = "com.google.firestore.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1"; + // A set of field paths on a document. // Used to restrict a get or update operation on a document to a subset of its // fields. // This is different from standard field masks, as this is always scoped to a -// [Document][google.firestore.v1.Document], and takes in account the dynamic -// nature of [Value][google.firestore.v1.Value]. +// [Document][google.firestore.v1.Document], and takes in account the dynamic nature of [Value][google.firestore.v1.Value]. message DocumentMask { - // The list of field paths in the mask. See - // [Document.fields][google.firestore.v1.Document.fields] for a field path - // syntax reference. + // The list of field paths in the mask. See [Document.fields][google.firestore.v1.Document.fields] for a field + // path syntax reference. 
repeated string field_paths = 1; } diff --git a/firestore/google/cloud/firestore_v1/proto/common_pb2.py b/firestore/google/cloud/firestore_v1/proto/common_pb2.py index b94341f7195c..d02facf144ce 100644 --- a/firestore/google/cloud/firestore_v1/proto/common_pb2.py +++ b/firestore/google/cloud/firestore_v1/proto/common_pb2.py @@ -14,8 +14,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -26,11 +26,11 @@ "\n\027com.google.firestore.v1B\013CommonProtoP\001Z\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 \x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xbb\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n3google/cloud/firestore_v1beta1/proto/document.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\x1a\x1cgoogle/api/annotations.proto"\x8a\x02\n\x08\x44ocument\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x06\x66ields\x18\x02 \x03(\x0b\x32..google.firestore.v1beta1.Document.FieldsEntry\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01"\xb8\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x12 
\x01(\x0cH\x00\x12\x19\n\x0freference_value\x18\x05 \x01(\tH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x37\n\tmap_value\x18\x06 \x01(\x0b\x32".google.firestore.v1beta1.MapValueH\x00\x42\x0c\n\nvalue_type"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x9a\x01\n\x08MapValue\x12>\n\x06\x66ields\x18\x01 \x03(\x0b\x32..google.firestore.v1beta1.MapValue.FieldsEntry\x1aN\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value:\x02\x38\x01\x42\xbb\x01\n\x1c\x63om.google.firestore.v1beta1B\rDocumentProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_type_dot_latlng__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto b/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto index dc310d70bbdd..b7ba79075a40 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -33,7 +33,6 @@ option java_outer_classname = "FirestoreProto"; option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; - // Specification of the Firestore API. // The Cloud Firestore service. @@ -94,8 +93,7 @@ service Firestore { // // Documents returned by this method are not guaranteed to be returned in the // same order that they were requested. - rpc BatchGetDocuments(BatchGetDocumentsRequest) - returns (stream BatchGetDocumentsResponse) { + rpc BatchGetDocuments(BatchGetDocumentsRequest) returns (stream BatchGetDocumentsResponse) { option (google.api.http) = { post: "/v1beta1/{database=projects/*/databases/*}/documents:batchGet" body: "*" @@ -103,8 +101,7 @@ service Firestore { } // Starts a new transaction. - rpc BeginTransaction(BeginTransactionRequest) - returns (BeginTransactionResponse) { + rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) { option (google.api.http) = { post: "/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction" body: "*" @@ -156,8 +153,7 @@ service Firestore { } // Lists all the collection IDs underneath a document. - rpc ListCollectionIds(ListCollectionIdsRequest) - returns (ListCollectionIdsResponse) { + rpc ListCollectionIds(ListCollectionIdsRequest) returns (ListCollectionIdsResponse) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds" body: "*" @@ -169,8 +165,7 @@ service Firestore { } } -// The request for -// [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. +// The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. message GetDocumentRequest { // The resource name of the Document to get. 
In the format: // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. @@ -194,8 +189,7 @@ message GetDocumentRequest { } } -// The request for -// [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. +// The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. message ListDocumentsRequest { // The parent resource name. In the format: // `projects/{project_id}/databases/{database_id}/documents` or @@ -237,17 +231,15 @@ message ListDocumentsRequest { // If the list should show missing documents. A missing document is a // document that does not exist but has sub-documents. These documents will - // be returned with a key but will not have fields, - // [Document.create_time][google.firestore.v1beta1.Document.create_time], or - // [Document.update_time][google.firestore.v1beta1.Document.update_time] set. + // be returned with a key but will not have fields, [Document.create_time][google.firestore.v1beta1.Document.create_time], + // or [Document.update_time][google.firestore.v1beta1.Document.update_time] set. // // Requests with `show_missing` may not specify `where` or // `order_by`. bool show_missing = 12; } -// The response for -// [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. +// The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. message ListDocumentsResponse { // The Documents found. repeated Document documents = 1; @@ -256,8 +248,7 @@ message ListDocumentsResponse { string next_page_token = 2; } -// The request for -// [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. +// The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. message CreateDocumentRequest { // The parent resource. For example: // `projects/{project_id}/databases/{database_id}/documents` or @@ -282,8 +273,7 @@ message CreateDocumentRequest { DocumentMask mask = 5; } -// The request for -// [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. +// The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. message UpdateDocumentRequest { // The updated document. // Creates the document if it does not already exist. @@ -309,8 +299,7 @@ message UpdateDocumentRequest { Precondition current_document = 4; } -// The request for -// [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. +// The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. message DeleteDocumentRequest { // The resource name of the Document to delete. In the format: // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. @@ -321,8 +310,7 @@ message DeleteDocumentRequest { Precondition current_document = 2; } -// The request for -// [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. +// The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. message BatchGetDocumentsRequest { // The database name. In the format: // `projects/{project_id}/databases/{database_id}`. @@ -358,8 +346,7 @@ message BatchGetDocumentsRequest { } } -// The streamed response for -// [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. +// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. message BatchGetDocumentsResponse { // A single result. 
// This can be empty if the server is just returning a transaction. @@ -374,8 +361,7 @@ message BatchGetDocumentsResponse { // The transaction that was started as part of this request. // Will only be set in the first response, and only if - // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] - // was set in the request. + // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] was set in the request. bytes transaction = 3; // The time at which the document was read. @@ -385,8 +371,7 @@ message BatchGetDocumentsResponse { google.protobuf.Timestamp read_time = 4; } -// The request for -// [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. +// The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. message BeginTransactionRequest { // The database name. In the format: // `projects/{project_id}/databases/{database_id}`. @@ -397,15 +382,13 @@ message BeginTransactionRequest { TransactionOptions options = 2; } -// The response for -// [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. +// The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. message BeginTransactionResponse { // The transaction that was started. bytes transaction = 1; } -// The request for -// [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. +// The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. message CommitRequest { // The database name. In the format: // `projects/{project_id}/databases/{database_id}`. @@ -420,8 +403,7 @@ message CommitRequest { bytes transaction = 3; } -// The response for -// [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. +// The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. message CommitResponse { // The result of applying the writes. // @@ -433,8 +415,7 @@ message CommitResponse { google.protobuf.Timestamp commit_time = 2; } -// The request for -// [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. +// The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. message RollbackRequest { // The database name. In the format: // `projects/{project_id}/databases/{database_id}`. @@ -444,8 +425,7 @@ message RollbackRequest { bytes transaction = 2; } -// The request for -// [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. +// The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. message RunQueryRequest { // The parent resource name. In the format: // `projects/{project_id}/databases/{database_id}/documents` or @@ -479,14 +459,12 @@ message RunQueryRequest { } } -// The response for -// [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. +// The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. message RunQueryResponse { // The transaction that was started as part of this request. // Can only be set in the first response, and only if - // [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] - // was set in the request. If set, no other fields will be set in this - // response. + // [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] was set in the request. + // If set, no other fields will be set in this response. 
bytes transaction = 2; // A query result. @@ -539,9 +517,9 @@ message WriteRequest { // A stream token that was previously sent by the server. // // The client should set this field to the token from the most recent - // [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. - // This acknowledges that the client has received responses up to this token. - // After sending this token, earlier tokens may not be used anymore. + // [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. This acknowledges that the client has + // received responses up to this token. After sending this token, earlier + // tokens may not be used anymore. // // The server may close the stream if there are too many unacknowledged // responses. @@ -597,8 +575,7 @@ message ListenRequest { map labels = 4; } -// The response for -// [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. +// The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. message ListenResponse { // The supported responses. oneof response_type { @@ -611,8 +588,8 @@ message ListenResponse { // A [Document][google.firestore.v1beta1.Document] has been deleted. DocumentDelete document_delete = 4; - // A [Document][google.firestore.v1beta1.Document] has been removed from a - // target (because it is no longer relevant to that target). + // A [Document][google.firestore.v1beta1.Document] has been removed from a target (because it is no longer + // relevant to that target). DocumentRemove document_remove = 6; // A filter to apply to the set of documents previously returned for the @@ -666,9 +643,7 @@ message Target { // If not specified, all matching Documents are returned before any // subsequent changes. oneof resume_type { - // A resume token from a prior - // [TargetChange][google.firestore.v1beta1.TargetChange] for an identical - // target. + // A resume token from a prior [TargetChange][google.firestore.v1beta1.TargetChange] for an identical target. // // Using a resume token with a different target is unsupported and may fail. bytes resume_token = 4; @@ -760,8 +735,7 @@ message TargetChange { google.protobuf.Timestamp read_time = 6; } -// The request for -// [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. +// The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. message ListCollectionIdsRequest { // The parent document. In the format: // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. @@ -777,8 +751,7 @@ message ListCollectionIdsRequest { string page_token = 3; } -// The response from -// [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. +// The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. message ListCollectionIdsResponse { // The collection ids. repeated string collection_ids = 1; diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py index cf23b20c3884..e3bd63b73f35 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py @@ -11,9 +11,7 @@ class FirestoreStub(object): - """Specification of the Firestore API. - - The Cloud Firestore service. + """The Cloud Firestore service. 
This service exposes several types of comparable timestamps: @@ -105,9 +103,7 @@ def __init__(self, channel): class FirestoreServicer(object): - """Specification of the Firestore API. - - The Cloud Firestore service. + """The Cloud Firestore service. This service exposes several types of comparable timestamps: diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query.proto b/firestore/google/cloud/firestore_v1beta1/proto/query.proto index 9bd0ad509444..94eec9cbbf3f 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/query.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,9 +17,9 @@ syntax = "proto3"; package google.firestore.v1beta1; -import "google/api/annotations.proto"; import "google/firestore/v1beta1/document.proto"; import "google/protobuf/wrappers.proto"; +import "google/api/annotations.proto"; option csharp_namespace = "Google.Cloud.Firestore.V1Beta1"; option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore"; @@ -29,6 +29,7 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; + // A Firestore query. message StructuredQuery { // A selection of a collection, such as `messages as m1`. @@ -114,6 +115,15 @@ message StructuredQuery { Value value = 3; } + // The projection of document's fields to return. + message Projection { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. + repeated FieldReference fields = 2; + } + // A filter with a single operand. message UnaryFilter { // A unary operator. @@ -147,20 +157,6 @@ message StructuredQuery { Direction direction = 2; } - // A reference to a field, such as `max(messages.time) as max_time`. - message FieldReference { - string field_path = 2; - } - - // The projection of document's fields to return. - message Projection { - // The fields to return. - // - // If empty, all fields are returned. To only return the name - // of the document, use `['__name__']`. - repeated FieldReference fields = 2; - } - // A sort direction. enum Direction { // Unspecified. @@ -173,6 +169,11 @@ message StructuredQuery { DESCENDING = 2; } + // A reference to a field, such as `max(messages.time) as max_time`. + message FieldReference { + string field_path = 2; + } + // The projection to return. 
Projection select = 1; diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py index ebe46d17df90..74f21ebec050 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py @@ -14,11 +14,11 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.firestore_v1beta1.proto import ( document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, ) from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -29,12 +29,12 @@ "\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 
\x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 
\x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -132,8 +132,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1742, - serialized_end=1803, + serialized_start=1830, + serialized_end=1891, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR) @@ -433,6 +433,44 @@ serialized_end=1573, ) +_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( + name="Projection", + full_name="google.firestore.v1beta1.StructuredQuery.Projection", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1575, + serialized_end=1661, +) + _STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor( name="UnaryFilter", full_name="google.firestore.v1beta1.StructuredQuery.UnaryFilter", @@ -493,8 +531,8 @@ fields=[], ) ], - serialized_start=1576, - serialized_end=1819, + serialized_start=1664, + serialized_end=1907, ) _STRUCTUREDQUERY_ORDER = _descriptor.Descriptor( @@ -549,8 +587,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1822, - serialized_end=1974, + serialized_start=1910, + serialized_end=2062, ) _STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor( @@ -587,45 +625,7 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1976, - serialized_end=2012, -) - -_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( - name="Projection", - full_name="google.firestore.v1beta1.StructuredQuery.Projection", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fields", - 
full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", - index=0, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2014, + serialized_start=2064, serialized_end=2100, ) @@ -787,10 +787,10 @@ _STRUCTUREDQUERY_FILTER, _STRUCTUREDQUERY_COMPOSITEFILTER, _STRUCTUREDQUERY_FIELDFILTER, + _STRUCTUREDQUERY_PROJECTION, _STRUCTUREDQUERY_UNARYFILTER, _STRUCTUREDQUERY_ORDER, _STRUCTUREDQUERY_FIELDREFERENCE, - _STRUCTUREDQUERY_PROJECTION, ], enum_types=[_STRUCTUREDQUERY_DIRECTION], serialized_options=None, @@ -911,6 +911,10 @@ ) _STRUCTUREDQUERY_FIELDFILTER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_FIELDFILTER +_STRUCTUREDQUERY_PROJECTION.fields_by_name[ + "fields" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_UNARYFILTER.fields_by_name[ "op" ].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR @@ -933,10 +937,6 @@ ].enum_type = _STRUCTUREDQUERY_DIRECTION _STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_PROJECTION.fields_by_name[ - "fields" -].message_type = _STRUCTUREDQUERY_FIELDREFERENCE -_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION _STRUCTUREDQUERY.fields_by_name[ "from" @@ -1045,6 +1045,23 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter) ), ), + Projection=_reflection.GeneratedProtocolMessageType( + "Projection", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""The projection of document's fields to return. + + + Attributes: + fields: + The fields to return. If empty, all fields are returned. To + only return the name of the document, use ``['__name__']``. + """, + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) + ), + ), UnaryFilter=_reflection.GeneratedProtocolMessageType( "UnaryFilter", (_message.Message,), @@ -1094,23 +1111,6 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference) ), ), - Projection=_reflection.GeneratedProtocolMessageType( - "Projection", - (_message.Message,), - dict( - DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION, - __module__="google.cloud.firestore_v1beta1.proto.query_pb2", - __doc__="""The projection of document's fields to return. - - - Attributes: - fields: - The fields to return. If empty, all fields are returned. To - only return the name of the document, use ``['__name__']``. - """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) - ), - ), DESCRIPTOR=_STRUCTUREDQUERY, __module__="google.cloud.firestore_v1beta1.proto.query_pb2", __doc__="""A Firestore query. 
@@ -1157,10 +1157,10 @@ _sym_db.RegisterMessage(StructuredQuery.Filter) _sym_db.RegisterMessage(StructuredQuery.CompositeFilter) _sym_db.RegisterMessage(StructuredQuery.FieldFilter) +_sym_db.RegisterMessage(StructuredQuery.Projection) _sym_db.RegisterMessage(StructuredQuery.UnaryFilter) _sym_db.RegisterMessage(StructuredQuery.Order) _sym_db.RegisterMessage(StructuredQuery.FieldReference) -_sym_db.RegisterMessage(StructuredQuery.Projection) Cursor = _reflection.GeneratedProtocolMessageType( "Cursor", diff --git a/firestore/google/cloud/firestore_v1beta1/proto/write.proto b/firestore/google/cloud/firestore_v1beta1/proto/write.proto index d1ee7d32f376..4e58cc1216e1 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/write.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/write.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,10 +17,10 @@ syntax = "proto3"; package google.firestore.v1beta1; -import "google/api/annotations.proto"; import "google/firestore/v1beta1/common.proto"; import "google/firestore/v1beta1/document.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option csharp_namespace = "Google.Cloud.Firestore.V1Beta1"; option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore"; @@ -30,6 +30,7 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; + // A write on a document. message Write { // The operation to execute. @@ -80,9 +81,8 @@ message DocumentTransform { REQUEST_TIME = 1; } - // The path of the field. See - // [Document.fields][google.firestore.v1beta1.Document.fields] for the field - // path syntax reference. + // The path of the field. See [Document.fields][google.firestore.v1beta1.Document.fields] for the field path syntax + // reference. string field_path = 1; // The transformation to apply on the field. @@ -176,21 +176,18 @@ message WriteResult { // previous update_time. google.protobuf.Timestamp update_time = 1; - // The results of applying each - // [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], - // in the same order. + // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the + // same order. repeated Value transform_results = 2; } // A [Document][google.firestore.v1beta1.Document] has changed. // -// May be the result of multiple [writes][google.firestore.v1beta1.Write], -// including deletes, that ultimately resulted in a new value for the -// [Document][google.firestore.v1beta1.Document]. +// May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that +// ultimately resulted in a new value for the [Document][google.firestore.v1beta1.Document]. // -// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages -// may be returned for the same logical change, if multiple targets are -// affected. +// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may be returned for the same logical +// change, if multiple targets are affected. message DocumentChange { // The new state of the [Document][google.firestore.v1beta1.Document]. 
// @@ -206,16 +203,13 @@ message DocumentChange { // A [Document][google.firestore.v1beta1.Document] has been deleted. // -// May be the result of multiple [writes][google.firestore.v1beta1.Write], -// including updates, the last of which deleted the -// [Document][google.firestore.v1beta1.Document]. +// May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the +// last of which deleted the [Document][google.firestore.v1beta1.Document]. // -// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages -// may be returned for the same logical delete, if multiple targets are -// affected. +// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may be returned for the same logical +// delete, if multiple targets are affected. message DocumentDelete { - // The resource name of the [Document][google.firestore.v1beta1.Document] that - // was deleted. + // The resource name of the [Document][google.firestore.v1beta1.Document] that was deleted. string document = 1; // A set of target IDs for targets that previously matched this entity. @@ -227,19 +221,16 @@ message DocumentDelete { google.protobuf.Timestamp read_time = 4; } -// A [Document][google.firestore.v1beta1.Document] has been removed from the -// view of the targets. +// A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets. // // Sent if the document is no longer relevant to a target and is out of view. // Can be sent instead of a DocumentDelete or a DocumentChange if the server // can not send the new value of the document. // -// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages -// may be returned for the same logical write or delete, if multiple targets are -// affected. +// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may be returned for the same logical +// write or delete, if multiple targets are affected. message DocumentRemove { - // The resource name of the [Document][google.firestore.v1beta1.Document] that - // has gone out of view. + // The resource name of the [Document][google.firestore.v1beta1.Document] that has gone out of view. string document = 1; // A set of target IDs for targets that previously matched this document. @@ -256,8 +247,7 @@ message ExistenceFilter { // The target ID to which this filter applies. int32 target_id = 1; - // The total count of documents that match - // [target_id][google.firestore.v1beta1.ExistenceFilter.target_id]. + // The total count of documents that match [target_id][google.firestore.v1beta1.ExistenceFilter.target_id]. // // If different from the count of documents in the client that match, the // client must manually determine which documents no longer match the target. 
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py index e8e275af8e8b..84e9bd8e8660 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/write_pb2.py @@ -14,7 +14,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.firestore_v1beta1.proto import ( common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2, ) @@ -22,6 +21,7 @@ document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2, ) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,13 +32,13 @@ "\n\034com.google.firestore.v1beta1B\nWriteProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation"\x88\x05\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\x8a\x04\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12\x34\n\tincrement\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07maximum\x18\x04 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07minimum\x18\x05 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 \x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n0google/cloud/firestore_v1beta1/proto/write.proto\x12\x18google.firestore.v1beta1\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x9d\x02\n\x05Write\x12\x34\n\x06update\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x10\n\x06\x64\x65lete\x18\x02 \x01(\tH\x00\x12@\n\ttransform\x18\x06 \x01(\x0b\x32+.google.firestore.v1beta1.DocumentTransformH\x00\x12;\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.PreconditionB\x0b\n\toperation"\x88\x05\n\x11\x44ocumentTransform\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12T\n\x10\x66ield_transforms\x18\x02 \x03(\x0b\x32:.google.firestore.v1beta1.DocumentTransform.FieldTransform\x1a\x8a\x04\n\x0e\x46ieldTransform\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x65\n\x13set_to_server_value\x18\x02 \x01(\x0e\x32\x46.google.firestore.v1beta1.DocumentTransform.FieldTransform.ServerValueH\x00\x12\x34\n\tincrement\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07maximum\x18\x04 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12\x32\n\x07minimum\x18\x05 \x01(\x0b\x32\x1f.google.firestore.v1beta1.ValueH\x00\x12G\n\x17\x61ppend_missing_elements\x18\x06 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00\x12\x45\n\x15remove_all_from_array\x18\x07 \x01(\x0b\x32$.google.firestore.v1beta1.ArrayValueH\x00"=\n\x0bServerValue\x12\x1c\n\x18SERVER_VALUE_UNSPECIFIED\x10\x00\x12\x10\n\x0cREQUEST_TIME\x10\x01\x42\x10\n\x0etransform_type"z\n\x0bWriteResult\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x11transform_results\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value"v\n\x0e\x44ocumentChange\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x12\n\ntarget_ids\x18\x05 \x03(\x05\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05"m\n\x0e\x44ocumentDelete\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x06 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"m\n\x0e\x44ocumentRemove\x12\x10\n\x08\x64ocument\x18\x01 \x01(\t\x12\x1a\n\x12removed_target_ids\x18\x02 \x03(\x05\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"3\n\x0f\x45xistenceFilter\x12\x11\n\ttarget_id\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nWriteProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) diff --git a/firestore/synth.metadata 
b/firestore/synth.metadata index 6cf0ce610b80..942c38b56000 100644 --- a/firestore/synth.metadata +++ b/firestore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-03-16T12:15:00.697965Z", + "updateTime": "2019-03-27T19:35:27.286829Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.17", - "dockerImage": "googleapis/artman@sha256:7231f27272231a884e09edb5953148c85ecd8467780d33c4a35c3e507885715b" + "version": "0.16.19", + "dockerImage": "googleapis/artman@sha256:70ba28fda87e032ae44e6df41b7fc342c1b0cce1ed90658c4890eb4f613038c2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dab002e28c81adcc5601278c36d4302c2624c8e2", - "internalRef": "238726437" + "sha": "1119e688a00927cb02a2361929f0ca3190f88466", + "internalRef": "240608914" } }, { @@ -46,4 +46,4 @@ } } ] -} +} \ No newline at end of file From 2c7c4471929d0a542d9388efe6535239dd22bfc2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 27 Mar 2019 15:43:57 -0400 Subject: [PATCH 7/7] Accommodate / apply changes for Bidi RPC metadata. --- firestore/google/cloud/firestore_v1/watch.py | 3 ++- firestore/google/cloud/firestore_v1beta1/watch.py | 2 +- firestore/tests/unit/v1/test_cross_language.py | 3 ++- firestore/tests/unit/v1/test_watch.py | 4 +++- firestore/tests/unit/v1beta1/test_cross_language.py | 4 ++-- firestore/tests/unit/v1beta1/test_watch.py | 4 ++-- 6 files changed, 12 insertions(+), 8 deletions(-) diff --git a/firestore/google/cloud/firestore_v1/watch.py b/firestore/google/cloud/firestore_v1/watch.py index 3e829c6b08b2..4140a58ad8fe 100644 --- a/firestore/google/cloud/firestore_v1/watch.py +++ b/firestore/google/cloud/firestore_v1/watch.py @@ -213,9 +213,10 @@ def should_recover(exc): # pragma: NO COVER ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests self._rpc = ResumableBidiRpc( - self._api.transport._stubs["firestore_stub"].Listen, + self._api.transport.listen, initial_request=initial_request, should_recover=should_recover, + metadata=self._firestore._rpc_metadata, ) self._rpc.add_done_callback(self._on_rpc_done) diff --git a/firestore/google/cloud/firestore_v1beta1/watch.py b/firestore/google/cloud/firestore_v1beta1/watch.py index 9b60ece38420..63ded0d2d25b 100644 --- a/firestore/google/cloud/firestore_v1beta1/watch.py +++ b/firestore/google/cloud/firestore_v1beta1/watch.py @@ -216,7 +216,7 @@ def should_recover(exc): # pragma: NO COVER self._api.transport.listen, initial_request=initial_request, should_recover=should_recover, - rpc_metadata=self._firestore._rpc_metadata, + metadata=self._firestore._rpc_metadata, ) self._rpc.add_done_callback(self._on_rpc_done) diff --git a/firestore/tests/unit/v1/test_cross_language.py b/firestore/tests/unit/v1/test_cross_language.py index 89810f201fbb..36bf233f73aa 100644 --- a/firestore/tests/unit/v1/test_cross_language.py +++ b/firestore/tests/unit/v1/test_cross_language.py @@ -342,12 +342,13 @@ def convert_precondition(precond): class DummyRpc(object): # pragma: NO COVER - def __init__(self, listen, initial_request, should_recover): + def __init__(self, listen, initial_request, should_recover, metadata=None): self.listen = listen self.initial_request = initial_request self.should_recover = should_recover self.closed = False self.callbacks = [] + self._metadata = metadata def add_done_callback(self, callback): self.callbacks.append(callback) diff --git a/firestore/tests/unit/v1/test_watch.py b/firestore/tests/unit/v1/test_watch.py index be22809802da..b66060c12db7 100644 ---
a/firestore/tests/unit/v1/test_watch.py +++ b/firestore/tests/unit/v1/test_watch.py @@ -713,6 +713,7 @@ def _to_protobuf(self): class DummyFirestore(object): _firestore_api = DummyFirestoreClient() _database_string = "abc://bar/" + _rpc_metadata = None def document(self, *document_path): # pragma: NO COVER if len(document_path) == 1: @@ -781,12 +782,13 @@ def Thread(self, name, target, kwargs): class DummyRpc(object): - def __init__(self, listen, initial_request, should_recover): + def __init__(self, listen, initial_request, should_recover, metadata=None): self.listen = listen self.initial_request = initial_request self.should_recover = should_recover self.closed = False self.callbacks = [] + self._metadata = metadata def add_done_callback(self, callback): self.callbacks.append(callback) diff --git a/firestore/tests/unit/v1beta1/test_cross_language.py b/firestore/tests/unit/v1beta1/test_cross_language.py index f9b8d0d42c70..bbcb39a19393 100644 --- a/firestore/tests/unit/v1beta1/test_cross_language.py +++ b/firestore/tests/unit/v1beta1/test_cross_language.py @@ -342,13 +342,13 @@ def convert_precondition(precond): class DummyRpc(object): # pragma: NO COVER - def __init__(self, listen, initial_request, should_recover, rpc_metadata=None): + def __init__(self, listen, initial_request, should_recover, metadata=None): self.listen = listen self.initial_request = initial_request self.should_recover = should_recover self.closed = False self.callbacks = [] - self._rpc_metadata = rpc_metadata + self._metadata = metadata def add_done_callback(self, callback): self.callbacks.append(callback) diff --git a/firestore/tests/unit/v1beta1/test_watch.py b/firestore/tests/unit/v1beta1/test_watch.py index 17bf4b46dc6f..6d8ba5a040bf 100644 --- a/firestore/tests/unit/v1beta1/test_watch.py +++ b/firestore/tests/unit/v1beta1/test_watch.py @@ -782,13 +782,13 @@ def Thread(self, name, target, kwargs): class DummyRpc(object): - def __init__(self, listen, initial_request, should_recover, rpc_metadata=None): + def __init__(self, listen, initial_request, should_recover, metadata=None): self.listen = listen self.initial_request = initial_request self.should_recover = should_recover self.closed = False self.callbacks = [] - self._rpc_metadata = rpc_metadata + self._metadata = metadata def add_done_callback(self, callback): self.callbacks.append(callback)
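For context on the `metadata` plumbing above, here is a self-contained sketch (illustrative only, not part of the patch) of the keyword shape the test doubles now mirror: `Watch` passes `metadata=` through to `ResumableBidiRpc`, and the doubles record it as `_metadata`. The listen callable, the initial request, and the resource-prefix value below are placeholders.

    class DummyRpc(object):
        # Stripped-down copy of the test double updated in this patch.
        def __init__(self, listen, initial_request, should_recover, metadata=None):
            self.listen = listen
            self.initial_request = initial_request
            self.should_recover = should_recover
            self.closed = False
            self.callbacks = []
            self._metadata = metadata

    rpc = DummyRpc(
        lambda *args, **kwargs: iter([]),  # stand-in for transport.listen
        initial_request=object(),          # placeholder ListenRequest
        should_recover=lambda exc: False,  # placeholder recovery policy
        metadata=[("google-cloud-resource-prefix", "projects/my-project/databases/(default)")],
    )
    assert rpc._metadata[0][0] == "google-cloud-resource-prefix"

Renaming the keyword from `rpc_metadata` to `metadata` presumably aligns these doubles with the keyword accepted by `google.api_core.bidi.ResumableBidiRpc`, which is what the production `watch.py` code constructs.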