From 098dd2e5d1cd7127dda956abce9d7b0960bdb103 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Tue, 6 May 2025 16:12:38 +0000 Subject: [PATCH 01/14] feat: Added read_time as a parameter to various calls (synchronous/base classes) --- google/cloud/firestore_v1/aggregation.py | 25 +++- google/cloud/firestore_v1/base_aggregation.py | 15 ++ google/cloud/firestore_v1/base_client.py | 15 +- google/cloud/firestore_v1/base_collection.py | 9 ++ google/cloud/firestore_v1/base_document.py | 17 ++- google/cloud/firestore_v1/base_query.py | 12 ++ google/cloud/firestore_v1/base_transaction.py | 5 + google/cloud/firestore_v1/client.py | 18 ++- google/cloud/firestore_v1/collection.py | 26 +++- google/cloud/firestore_v1/document.py | 20 ++- google/cloud/firestore_v1/query.py | 36 ++++- google/cloud/firestore_v1/transaction.py | 17 +++ tests/unit/v1/test_aggregation.py | 129 +++++++++++++--- tests/unit/v1/test_client.py | 54 +++++-- tests/unit/v1/test_collection.py | 67 +++++++-- tests/unit/v1/test_document.py | 64 ++++++-- tests/unit/v1/test_query.py | 139 +++++++++++++++--- tests/unit/v1/test_transaction.py | 28 +++- 18 files changed, 604 insertions(+), 92 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index ec0fbc1894..423117f1fd 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -20,6 +20,8 @@ """ from __future__ import annotations +import datetime + from typing import TYPE_CHECKING, Any, Generator, List, Optional, Union from google.api_core import exceptions, gapic_v1 @@ -56,6 +58,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[AggregationResult]: """Runs the aggregation query. @@ -78,6 +81,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: QueryResultsList[AggregationResult]: The aggregation query results. @@ -90,6 +97,7 @@ def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) result_list = list(result) @@ -100,13 +108,14 @@ def get( return QueryResultsList(result_list, explain_options, explain_metrics) - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None): + def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): """Helper method for :meth:`stream`.""" request, kwargs = self._prep_stream( transaction, retry, timeout, explain_options, + read_time, ) return self._client._firestore_api.run_aggregation_query( @@ -132,6 +141,7 @@ def _make_stream( retry: Union[retries.Retry, None, object] = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[List[AggregationResult], Any, Optional[ExplainMetrics]]: """Internal method for stream(). Runs the aggregation query. 
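
Usage sketch (illustrative only, not part of the patch; the client setup and
the "books" collection are assumptions) showing how the new keyword added to
get() and stream() above is meant to be called:

    import datetime

    from google.cloud import firestore

    client = firestore.Client()

    # Count documents as they existed fifteen minutes ago. An aware datetime
    # is used here; per the docstrings above, a naive one is assumed UTC.
    read_time = datetime.datetime.now(
        tz=datetime.timezone.utc
    ) - datetime.timedelta(minutes=15)

    aggregation_query = client.collection("books").count(alias="total")
    for result_batch in aggregation_query.get(read_time=read_time):
        for result in result_batch:
            print(result.alias, result.value)
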
@@ -155,6 +165,10 @@ def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: List[AggregationResult]: @@ -172,6 +186,7 @@ def _make_stream( retry, timeout, explain_options, + read_time, ) while True: try: @@ -182,6 +197,8 @@ def _make_stream( transaction, retry, timeout, + explain_options, + read_time, ) continue else: @@ -206,6 +223,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[List[AggregationResult]]: """Runs the aggregation query. @@ -229,6 +247,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `StreamGenerator[List[AggregationResult]]`: @@ -239,5 +261,6 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time ) return StreamGenerator(inner_generator, explain_options) diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index 34a3baad81..d99c371094 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -21,6 +21,8 @@ from __future__ import annotations import abc +import datetime + from abc import ABC from typing import TYPE_CHECKING, Any, Coroutine, List, Optional, Tuple, Union @@ -205,6 +207,7 @@ def _prep_stream( retry: Union[retries.Retry, retries.AsyncRetry, None, object] = None, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: parent_path, expected_prefix = self._collection_ref._parent_info() request = { @@ -214,6 +217,8 @@ def _prep_stream( } if explain_options: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -228,6 +233,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[AggregationResult] | Coroutine[Any, Any, List[List[AggregationResult]]] @@ -253,6 +259,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. 
This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: (QueryResultsList[List[AggregationResult]] | Coroutine[Any, Any, List[List[AggregationResult]]]): @@ -270,6 +280,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]] @@ -291,6 +302,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: StreamGenerator[List[AggregationResult]] | AsyncStreamGenerator[List[AggregationResult]]: diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index 9b1c0bccd4..c8504f39db 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -25,6 +25,7 @@ """ from __future__ import annotations +import datetime import os from typing import ( Any, @@ -437,6 +438,7 @@ def _prep_get_all( transaction: BaseTransaction | None = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict, dict]: """Shared setup for async/sync :meth:`get_all`.""" document_paths, reference_map = _reference_info(references) @@ -447,6 +449,8 @@ def _prep_get_all( "mask": mask, "transaction": _helpers.get_transaction_id(transaction), } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, reference_map, kwargs @@ -458,6 +462,8 @@ def get_all( transaction=None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Union[ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any] ]: @@ -467,9 +473,14 @@ def _prep_collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" - request = {"parent": "{}/documents".format(self._database_string)} + request = { + "parent": "{}/documents".format(self._database_string), + } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -478,6 +489,8 @@ def collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index b74ced2a38..c3411d3c2e 100644 --- 
a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -15,7 +15,9 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations +import datetime import random + from typing import ( TYPE_CHECKING, Any, @@ -202,6 +204,7 @@ def _prep_list_documents( page_size: Optional[int] = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async / sync :method:`list_documents`""" parent, _ = self._parent_info() @@ -215,6 +218,8 @@ def _prep_list_documents( # to include no fields "mask": {"field_paths": None}, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -224,6 +229,8 @@ def list_documents( page_size: Optional[int] = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Union[ Generator[DocumentReference, Any, Any], AsyncGenerator[DocumentReference, Any] ]: @@ -497,6 +504,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[DocumentSnapshot] | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]] @@ -510,6 +518,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot] | AsyncIterator[DocumentSnapshot]: raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index b16b8abace..921c3aab7e 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -16,6 +16,8 @@ from __future__ import annotations import copy +import datetime + from typing import ( TYPE_CHECKING, Any, @@ -290,6 +292,7 @@ def _prep_batch_get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`get`.""" if isinstance(field_paths, str): @@ -306,6 +309,8 @@ def _prep_batch_get( "mask": mask, "transaction": _helpers.get_transaction_id(transaction), } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -316,6 +321,8 @@ def get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> "DocumentSnapshot" | Awaitable["DocumentSnapshot"]: raise NotImplementedError @@ -324,9 +331,15 @@ def _prep_collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" - request = {"parent": self._document_path, "page_size": page_size} + request = { + "parent": self._document_path, + "page_size": page_size, + } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -336,6 
+349,8 @@ def collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 5a9efaf783..31238da3d1 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -22,8 +22,10 @@ import abc import copy +import datetime import math import warnings + from typing import ( TYPE_CHECKING, Any, @@ -1031,6 +1033,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( QueryResultsList[DocumentSnapshot] | Coroutine[Any, Any, QueryResultsList[DocumentSnapshot]] @@ -1043,6 +1046,7 @@ def _prep_stream( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, str, dict]: """Shared setup for async / sync :meth:`stream`""" if self._limit_to_last: @@ -1059,6 +1063,8 @@ def _prep_stream( } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, expected_prefix, kwargs @@ -1070,6 +1076,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[document.DocumentSnapshot] | AsyncStreamGenerator[DocumentSnapshot] @@ -1426,6 +1433,7 @@ def _prep_get_partitions( partition_count, retry: retries.Retry | object | None = None, timeout: float | None = None, + read_time: Optional[datetime.datetime] = None, ) -> Tuple[dict, dict]: self._validate_partition_query() parent_path, expected_prefix = self._parent._parent_info() @@ -1442,6 +1450,8 @@ def _prep_get_partitions( "structured_query": query._to_protobuf(), "partition_count": partition_count, } + if read_time is not None: + request["read_time"] = read_time kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) return request, kwargs @@ -1451,6 +1461,8 @@ def get_partitions( partition_count, retry: Optional[retries.Retry] = None, timeout: Optional[float] = None, + *, + read_time: Optional[datetime.datetime] = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py index 92e54c81c4..303131312b 100644 --- a/google/cloud/firestore_v1/base_transaction.py +++ b/google/cloud/firestore_v1/base_transaction.py @@ -15,6 +15,8 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations +import datetime + from typing import ( TYPE_CHECKING, Any, @@ -148,6 +150,8 @@ def get_all( references: list, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> ( Generator[DocumentSnapshot, Any, None] | Coroutine[Any, Any, AsyncGenerator[DocumentSnapshot, Any]] @@ -161,6 +165,7 @@ def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> ( StreamGenerator[DocumentSnapshot] | Generator[DocumentSnapshot, Any, None] diff --git 
a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index 23c6b36ef2..ec394c9206 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -25,6 +25,8 @@ """ from __future__ import annotations +import datetime + from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Union from google.api_core import gapic_v1 @@ -205,6 +207,8 @@ def get_all( transaction: Transaction | None = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieve a batch of documents. @@ -239,13 +243,17 @@ def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: .DocumentSnapshot: The next document snapshot that fulfills the query, or :data:`None` if the document does not exist. """ request, reference_map, kwargs = self._prep_get_all( - references, field_paths, transaction, retry, timeout + references, field_paths, transaction, retry, timeout, read_time ) response_iterator = self._firestore_api.batch_get_documents( @@ -261,6 +269,8 @@ def collections( self, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List top-level collections of the client's database. @@ -269,12 +279,16 @@ def collections( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: iterator of subcollections of the current document. 
""" - request, kwargs = self._prep_collections(retry, timeout) + request, kwargs = self._prep_collections(retry, timeout, read_time) iterator = self._firestore_api.list_collection_ids( request=request, diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index cd6929b688..e37f6ad0fc 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -15,6 +15,8 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -137,6 +139,8 @@ def list_documents( page_size: Union[int, None] = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: Union[float, None] = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List all subdocuments of the current collection. @@ -148,6 +152,10 @@ def list_documents( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]: @@ -155,7 +163,9 @@ def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents(page_size, retry, timeout) + request, kwargs = self._prep_list_documents( + page_size, retry, timeout, read_time + ) iterator = self._client._firestore_api.list_documents( request=request, @@ -174,6 +184,7 @@ def get( timeout: Union[float, None] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in this collection. @@ -192,6 +203,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. read-after-write is not @@ -204,6 +219,8 @@ def get( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options is not None: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return query.get(transaction=transaction, **kwargs) @@ -214,6 +231,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in this collection. 
@@ -245,6 +263,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `StreamGenerator[DocumentSnapshot]`: A generator of the query results. @@ -252,6 +274,8 @@ def stream( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return query.stream(transaction=transaction, **kwargs) diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 0c7d7872fd..c8248b9b89 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -16,7 +16,7 @@ from __future__ import annotations import datetime import logging -from typing import Any, Callable, Generator, Iterable +from typing import Any, Callable, Generator, Iterable, Optional from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -365,6 +365,8 @@ def get( transaction=None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> DocumentSnapshot: """Retrieve a snapshot of the current document. @@ -387,6 +389,10 @@ def get( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`: @@ -398,7 +404,9 @@ def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout) + request, kwargs = self._prep_batch_get( + field_paths, transaction, retry, timeout, read_time + ) response_iter = self._client._firestore_api.batch_get_documents( request=request, @@ -434,6 +442,8 @@ def collections( page_size: int | None = None, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[Any, Any, None]: """List subcollections of the current document. @@ -445,6 +455,10 @@ def collections( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. 
Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]: @@ -452,7 +466,7 @@ def collections( document does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_collections(page_size, retry, timeout) + request, kwargs = self._prep_collections(page_size, retry, timeout, read_time) iterator = self._client._firestore_api.list_collection_ids( request=request, diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index 0b52afc834..a7c7034510 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -20,6 +20,8 @@ """ from __future__ import annotations +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, List, Optional, Type from google.api_core import exceptions, gapic_v1 @@ -141,6 +143,7 @@ def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -162,6 +165,10 @@ def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: QueryResultsList[DocumentSnapshot]: The documents in the collection @@ -188,6 +195,7 @@ def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) result_list = list(result) if is_limited_to_last: @@ -238,13 +246,16 @@ def _chunkify( ): return - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None): + def _get_stream_iterator( + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( transaction, retry, timeout, explain_options, + read_time ) response_iterator = self._client._firestore_api.run_query( @@ -353,6 +364,7 @@ def _make_stream( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, Optional[ExplainMetrics]]: """Internal method for stream(). Read the documents in the collection that match this query. @@ -386,6 +398,10 @@ def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. 
Yields: DocumentSnapshot: @@ -402,6 +418,7 @@ def _make_stream( retry, timeout, explain_options, + read_time, ) last_snapshot = None @@ -416,6 +433,7 @@ def _make_stream( transaction, retry, timeout, + read_time=read_time, ) continue else: @@ -448,6 +466,7 @@ def stream( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> StreamGenerator[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -479,6 +498,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: `StreamGenerator[DocumentSnapshot]`: A generator of the query results. @@ -488,6 +511,7 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) return StreamGenerator(inner_generator, explain_options) @@ -580,6 +604,8 @@ def get_partitions( partition_count, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[QueryPartition, None, None]: """Partition a query for parallelization. @@ -595,8 +621,14 @@ def get_partitions( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. """ - request, kwargs = self._prep_get_partitions(partition_count, retry, timeout) + request, kwargs = self._prep_get_partitions( + partition_count, retry, timeout, read_time + ) pager = self._client._firestore_api.partition_query( request=request, diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index 37afd5fb00..76c50499c1 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -15,6 +15,8 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations +import datetime + from typing import TYPE_CHECKING, Any, Callable, Generator, Optional from google.api_core import exceptions, gapic_v1 @@ -154,6 +156,8 @@ def get_all( references: list, retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieves multiple documents from Firestore. @@ -164,12 +168,18 @@ def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. 
This must be a timestamp within the past one hour, or if Point-in-Time Recovery
+                is enabled, can additionally be a whole minute timestamp within the past 7 days. If no
+                timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC.
 
         Yields:
             .DocumentSnapshot: The next document snapshot that fulfills the
             query, or :data:`None` if the document does not exist.
         """
         kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+        if read_time is not None:
+            kwargs["read_time"] = read_time
         return self._client.get_all(references, transaction=self, **kwargs)
 
     def get(
@@ -179,6 +189,7 @@ def get(
         timeout: Optional[float] = None,
         *,
         explain_options: Optional[ExplainOptions] = None,
+        read_time: Optional[datetime.datetime] = None,
     ) -> StreamGenerator[DocumentSnapshot] | Generator[DocumentSnapshot, Any, None]:
         """Retrieve a document or a query result from the database.
 
@@ -194,6 +205,10 @@ def get(
             Options to enable query profiling for this query. When set,
             explain_metrics will be available on the returned generator.
             Can only be used when running a query, not a document reference.
+            read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given
+                time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery
+                is enabled, can additionally be a whole minute timestamp within the past 7 days. If no
+                timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC.
 
         Yields:
             .DocumentSnapshot: The next document snapshot that fulfills the
@@ -205,6 +220,8 @@ def get(
             reference.
         """
         kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
+        if read_time is not None:
+            kwargs["read_time"] = read_time
         if isinstance(ref_or_query, DocumentReference):
             if explain_options is not None:
                 raise ValueError(
diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py
index 4d1eed1980..7e870e1139 100644
--- a/tests/unit/v1/test_aggregation.py
+++ b/tests/unit/v1/test_aggregation.py
@@ -26,6 +26,8 @@
 from google.cloud.firestore_v1.query_profile import ExplainMetrics, QueryExplainError
 from google.cloud.firestore_v1.query_results import QueryResultsList
 from google.cloud.firestore_v1.stream_generator import StreamGenerator
+from google.cloud.firestore_v1.types import RunAggregationQueryResponse
+from google.protobuf.timestamp_pb2 import Timestamp
 from tests.unit.v1._test_helpers import (
     make_aggregation_query,
     make_aggregation_query_response,
@@ -384,11 +386,74 @@ def test_aggregation_query_prep_stream_with_explain_options():
     assert kwargs == {"retry": None}
 
 
+def test_aggregation_query_prep_stream_with_read_time():
+    client = make_client()
+    parent = client.collection("dee")
+    query = make_query(parent)
+    aggregation_query = make_aggregation_query(query)
+
+    aggregation_query.count(alias="all")
+    aggregation_query.sum("someref", alias="sumall")
+    aggregation_query.avg("anotherref", alias="avgall")
+
+    # any recent timestamp works; _prep_stream passes it through unchanged
+    read_time = datetime.now()
+
+    request, kwargs = aggregation_query._prep_stream(read_time=read_time)
+
+    parent_path, _ = parent._parent_info()
+    expected_request = {
+        "parent": parent_path,
+        "structured_aggregation_query": aggregation_query._to_protobuf(),
+        "transaction": None,
+        "read_time": read_time,
+    }
+    assert request == expected_request
+    assert kwargs == {"retry": None}
+
+
+@pytest.mark.parametrize("timezone", [None, timezone.utc, timezone(timedelta(hours=5))])
+def test_aggregation_query_get_stream_iterator_read_time_different_timezones(timezone):
+    client = make_client()
+    parent = client.collection("dee")
+    query = make_query(parent)
+    aggregation_query = make_aggregation_query(query)
+
+    aggregation_query.count(alias="all")
+    aggregation_query.sum("someref", alias="sumall")
+    aggregation_query.avg("anotherref", alias="avgall")
+
+    # 1800 seconds after epoch
+    read_time = datetime(1970, 1, 1, 0, 30)
+    if timezone is not None:
+        read_time = datetime.fromtimestamp(1800, tz=timezone)
+
+    # The internal firestore API needs to be initialized before it gets mocked.
+    client._firestore_api
+
+    # Validate that the same timestamp_pb object would be sent in the actual request.
+    with mock.patch.object(
+        type(client._firestore_api_internal.transport.run_aggregation_query), "__call__"
+    ) as call:
+        call.return_value = iter([RunAggregationQueryResponse()])
+        aggregation_query._get_stream_iterator(
+            transaction=None, retry=None, timeout=None, read_time=read_time
+        )
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request_read_time = args[0].read_time
+
+        # Verify that the timestamp is correct.
+        expected_timestamp = Timestamp(seconds=1800)
+        assert request_read_time.timestamp_pb() == expected_timestamp
+
+
 def _aggregation_query_get_helper(
     retry=None,
     timeout=None,
-    read_time=None,
     explain_options=None,
+    response_read_time=None,
+    query_read_time=None,
 ):
     from google.cloud._helpers import _datetime_to_pb_timestamp
 
@@ -411,7 +476,9 @@ def _aggregation_query_get_helper(
     aggregation_query = make_aggregation_query(query)
     aggregation_query.count(alias="all")
 
-    aggregation_result = AggregationResult(alias="total", value=5, read_time=read_time)
+    aggregation_result = AggregationResult(
+        alias="total", value=5, read_time=response_read_time,
+    )
 
     if explain_options is not None:
         explain_metrics = {"execution_stats": {"results_returned": 1}}
     else:
         explain_metrics = None
     response_pb = make_aggregation_query_response(
         [aggregation_result],
-        read_time=read_time,
+        read_time=response_read_time,
         explain_metrics=explain_metrics,
     )
     firestore_api.run_aggregation_query.return_value = iter([response_pb])
     kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
 
     # Execute the query and check the response.
-    returned = aggregation_query.get(**kwargs, explain_options=explain_options)
+    returned = aggregation_query.get(
+        **kwargs,
+        explain_options=explain_options,
+        read_time=query_read_time,
+    )
 
     assert isinstance(returned, QueryResultsList)
     assert len(returned) == 1
@@ -434,9 +505,9 @@ def _aggregation_query_get_helper(
     for r in result:
         assert r.alias == aggregation_result.alias
         assert r.value == aggregation_result.value
-        if read_time is not None:
+        if response_read_time is not None:
             result_datetime = _datetime_to_pb_timestamp(r.read_time)
-            assert result_datetime == read_time
+            assert result_datetime == response_read_time
 
     assert returned._explain_options == explain_options
     assert returned.explain_options == explain_options
@@ -457,6 +528,8 @@ def _aggregation_query_get_helper(
     }
     if explain_options is not None:
         expected_request["explain_options"] = explain_options._to_dict()
+    if query_read_time is not None:
+        expected_request["read_time"] = query_read_time
 
     # Verify the mock call.
 firestore_api.run_aggregation_query.assert_called_once_with(
     request=expected_request,
     metadata=client._rpc_metadata,
     **kwargs,
 )
@@ -473,9 +546,11 @@ def test_aggregation_query_get():
 def test_aggregation_query_get_with_readtime():
     from google.cloud._helpers import _datetime_to_pb_timestamp
 
-    one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1)
-    read_time = _datetime_to_pb_timestamp(one_hour_ago)
-    _aggregation_query_get_helper(read_time=read_time)
+    query_read_time = datetime.now(tz=timezone.utc) - timedelta(hours=1)
+    response_read_time = _datetime_to_pb_timestamp(query_read_time)
+    _aggregation_query_get_helper(
+        response_read_time=response_read_time, query_read_time=query_read_time
+    )
 
 
 def test_aggregation_query_get_retry_timeout():
@@ -555,6 +630,7 @@ def _aggregation_query_stream_w_retriable_exc_helper(
     timeout=None,
     transaction=None,
     expect_retry=True,
+    read_time=None,
 ):
     from google.api_core import exceptions, gapic_v1
 
@@ -628,24 +704,31 @@ def _stream_w_exception(*_args, **_kw):
         expected_transaction_id = transaction.id
     else:
         expected_transaction_id = None
+
+    expected_request = {
+        "parent": parent_path,
+        "structured_aggregation_query": aggregation_query._to_protobuf(),
+        "transaction": expected_transaction_id,
+    }
+    if read_time is not None:
+        expected_request["read_time"] = read_time
     assert calls[0] == mock.call(
-        request={
-            "parent": parent_path,
-            "structured_aggregation_query": aggregation_query._to_protobuf(),
-            "transaction": expected_transaction_id,
-        },
+        request=expected_request,
         metadata=client._rpc_metadata,
         **kwargs,
     )
     if expect_retry:
+        expected_request = {
+            "parent": parent_path,
+            "structured_aggregation_query": aggregation_query._to_protobuf(),
+            "transaction": None,
+        }
+        if read_time is not None:
+            expected_request["read_time"] = read_time
         assert calls[1] == mock.call(
-            request={
-                "parent": parent_path,
-                "structured_aggregation_query": aggregation_query._to_protobuf(),
-                "transaction": None,
-            },
+            request=expected_request,
             metadata=client._rpc_metadata,
             **kwargs,
         )
@@ -713,7 +796,9 @@ def _aggregation_query_stream_helper(
     kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
 
     # Execute the query and check the response.
-    returned = aggregation_query.stream(**kwargs, explain_options=explain_options)
+    returned = aggregation_query.stream(
+        **kwargs, explain_options=explain_options, read_time=read_time
+    )
 
     assert isinstance(returned, StreamGenerator)
     results = []
@@ -743,6 +828,8 @@
     }
     if explain_options is not None:
         expected_request["explain_options"] = explain_options._to_dict()
+    if read_time is not None:
+        expected_request["read_time"] = read_time
 
     # Verify the mock call.
firestore_api.run_aggregation_query.assert_called_once_with( @@ -756,7 +843,7 @@ def test_aggregation_query_stream(): _aggregation_query_stream_helper() -def test_aggregation_query_stream_with_readtime(): +def test_aggregation_query_stream_with_read_time(): from google.cloud._helpers import _datetime_to_pb_timestamp one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1) diff --git a/tests/unit/v1/test_client.py b/tests/unit/v1/test_client.py index edb411c9ff..df3ae15b41 100644 --- a/tests/unit/v1/test_client.py +++ b/tests/unit/v1/test_client.py @@ -281,7 +281,7 @@ def test_client_document_factory_w_nested_path(database): assert isinstance(document2, DocumentReference) -def _collections_helper(retry=None, timeout=None, database=None): +def _collections_helper(retry=None, timeout=None, database=None, read_time=None): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference @@ -298,7 +298,7 @@ def __iter__(self): client._firestore_api_internal = firestore_api kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) - collections = list(client.collections(**kwargs)) + collections = list(client.collections(read_time=read_time, **kwargs)) assert len(collections) == len(collection_ids) for collection, collection_id in zip(collections, collection_ids): @@ -307,8 +307,13 @@ def __iter__(self): assert collection.id == collection_id base_path = client._database_string + "/documents" + expected_request = { + "parent": base_path, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.list_collection_ids.assert_called_once_with( - request={"parent": base_path}, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -328,6 +333,12 @@ def test_client_collections_w_retry_timeout(database): _collections_helper(retry=retry, timeout=timeout, database=database) +@pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) +def test_client_collections_read_time(database): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _collections_helper(database=database, read_time=read_time) + + def _invoke_get_all(client, references, document_pbs, **kwargs): # Create a minimal fake GAPIC with a dummy response. 
firestore_api = mock.Mock(spec=["batch_get_documents"]) @@ -345,7 +356,12 @@ def _invoke_get_all(client, references, document_pbs, **kwargs): def _get_all_helper( - num_snapshots=2, txn_id=None, retry=None, timeout=None, database=None + num_snapshots=2, + txn_id=None, + retry=None, + timeout=None, + database=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot @@ -355,13 +371,13 @@ def _get_all_helper( data1 = {"a": "cheese"} document1 = client.document("pineapple", "lamp1") - document_pb1, read_time = _doc_get_info(document1._document_path, data1) - response1 = _make_batch_response(found=document_pb1, read_time=read_time) + document_pb1, doc_read_time = _doc_get_info(document1._document_path, data1) + response1 = _make_batch_response(found=document_pb1, read_time=doc_read_time) data2 = {"b": True, "c": 18} document2 = client.document("pineapple", "lamp2") - document, read_time = _doc_get_info(document2._document_path, data2) - response2 = _make_batch_response(found=document, read_time=read_time) + document, doc_read_time = _doc_get_info(document2._document_path, data2) + response2 = _make_batch_response(found=document, read_time=doc_read_time) document3 = client.document("pineapple", "lamp3") response3 = _make_batch_response(missing=document3._document_path) @@ -384,6 +400,7 @@ def _get_all_helper( documents, responses, field_paths=field_paths, + read_time=read_time, **kwargs, ) @@ -402,14 +419,17 @@ def _get_all_helper( mask = common.DocumentMask(field_paths=field_paths) kwargs.pop("transaction", None) + expected_request = { + "database": client._database_string, + "documents": doc_paths, + "mask": mask, + "transaction": txn_id, + } + if read_time is not None: + expected_request["read_time"] = read_time client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": mask, - "transaction": txn_id, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -440,6 +460,12 @@ def test_client_get_all_wrong_order(database): _get_all_helper(num_snapshots=3, database=database) +@pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) +def test_client_get_all_read_time(database): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _get_all_helper(database=database, read_time=read_time) + + @pytest.mark.parametrize("database", [None, DEFAULT_DATABASE, "somedb"]) def test_client_get_all_unknown_result(database): from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE diff --git a/tests/unit/v1/test_collection.py b/tests/unit/v1/test_collection.py index 29f76108d1..da91651b95 100644 --- a/tests/unit/v1/test_collection.py +++ b/tests/unit/v1/test_collection.py @@ -16,6 +16,7 @@ import mock +from datetime import datetime, timezone from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT @@ -266,7 +267,7 @@ def test_add_w_retry_timeout(): _add_helper(retry=retry, timeout=timeout) -def _list_documents_helper(page_size=None, retry=None, timeout=None): +def _list_documents_helper(page_size=None, retry=None, timeout=None, read_time=None): from google.api_core.page_iterator import Iterator, Page from google.cloud.firestore_v1 import _helpers as _fs_v1_helpers @@ -299,9 +300,15 @@ def _next_page(self): kwargs = _fs_v1_helpers.make_retry_timeout_kwargs(retry, timeout) if page_size is not None: - documents = list(collection.list_documents(page_size=page_size, **kwargs)) 
+ documents = list( + collection.list_documents( + page_size=page_size, + **kwargs, + read_time=read_time, + ) + ) else: - documents = list(collection.list_documents(**kwargs)) + documents = list(collection.list_documents(**kwargs, read_time=read_time)) # Verify the response and the mocks. assert len(documents) == len(document_ids) @@ -311,14 +318,18 @@ def _next_page(self): assert document.id == document_id parent, _ = collection._parent_info() + expected_request = { + "parent": parent, + "collection_id": collection.id, + "page_size": page_size, + "show_missing": True, + "mask": {"field_paths": None}, + } + if read_time is not None: + expected_request["read_time"] = read_time + api_client.list_documents.assert_called_once_with( - request={ - "parent": parent, - "collection_id": collection.id, - "page_size": page_size, - "show_missing": True, - "mask": {"field_paths": None}, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -340,6 +351,10 @@ def test_list_documents_w_page_size(): _list_documents_helper(page_size=25) +def test_list_documents_w_read_time(): + _list_documents_helper(read_time=datetime.now()) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_get(query_class): collection = _make_collection_reference("collection") @@ -403,6 +418,22 @@ def test_get_w_explain_options(query_class): ) +@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) +def test_get_w_read_time(query_class): + read_time = datetime.now(tz=timezone.utc) + collection = _make_collection_reference("collection") + get_response = collection.get(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + + assert get_response is query_instance.get.return_value + query_instance.get.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + @mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) def test_stream(query_class): collection = _make_collection_reference("collection") @@ -463,6 +494,22 @@ def test_stream_w_explain_options(query_class): ) +@mock.patch("google.cloud.firestore_v1.query.Query", autospec=True) +def test_stream_w_read_time(query_class): + read_time = datetime.now(tz=timezone.utc) + collection = _make_collection_reference("collection") + get_response = collection.stream(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + + assert get_response is query_instance.stream.return_value + query_instance.stream.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + @mock.patch("google.cloud.firestore_v1.collection.Watch", autospec=True) def test_on_snapshot(watch): collection = _make_collection_reference("collection") diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index b9116ae61d..cc9ece9749 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -16,6 +16,9 @@ import mock import pytest +from datetime import datetime + +from google.protobuf import timestamp_pb2 from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT @@ -393,6 +396,7 @@ def _get_helper( retry=None, timeout=None, database=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.transaction import Transaction @@ -401,10 +405,14 @@ def _get_helper( # Create a minimal fake GAPIC with a dummy response. 
create_time = 123 update_time = 234 - read_time = 345 + if read_time: + response_read_time = timestamp_pb2.Timestamp() + response_read_time.FromDatetime(read_time) + else: + response_read_time = 345 firestore_api = mock.Mock(spec=["batch_get_documents"]) response = mock.create_autospec(firestore.BatchGetDocumentsResponse) - response.read_time = read_time + response.read_time = response_read_time response.found = mock.create_autospec(document.Document) response.found.fields = {} response.found.create_time = create_time @@ -435,7 +443,10 @@ def WhichOneof(val): kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) snapshot = document_reference.get( - field_paths=field_paths, transaction=transaction, **kwargs + field_paths=field_paths, + transaction=transaction, + **kwargs, + read_time=read_time, ) assert snapshot.reference is document_reference @@ -448,7 +459,7 @@ def WhichOneof(val): else: assert snapshot.to_dict() == {} assert snapshot.exists - assert snapshot.read_time is read_time + assert snapshot.read_time is response_read_time assert snapshot.create_time is create_time assert snapshot.update_time is update_time @@ -462,14 +473,18 @@ def WhichOneof(val): expected_transaction_id = transaction_id else: expected_transaction_id = None + + expected_request = { + "database": client._database_string, + "documents": [document_reference._document_path], + "mask": mask, + "transaction": expected_transaction_id, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": [document_reference._document_path], - "mask": mask, - "transaction": expected_transaction_id, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -520,7 +535,14 @@ def test_documentreference_get_with_transaction(database): _get_helper(use_transaction=True, database=database) -def _collections_helper(page_size=None, retry=None, timeout=None, database=None): +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_documentreference_get_with_read_time(database): + _get_helper(read_time=datetime.now(), database=database) + + +def _collections_helper( + page_size=None, retry=None, timeout=None, read_time=None, database=None +): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.collection import CollectionReference from google.cloud.firestore_v1.services.firestore.client import FirestoreClient @@ -541,9 +563,11 @@ def __iter__(self): # Actually make a document and call delete(). document = _make_document_reference("where", "we-are", client=client) if page_size is not None: - collections = list(document.collections(page_size=page_size, **kwargs)) + collections = list( + document.collections(page_size=page_size, **kwargs, read_time=read_time) + ) else: - collections = list(document.collections(**kwargs)) + collections = list(document.collections(**kwargs, read_time=read_time)) # Verify the response and the mocks. 
assert len(collections) == len(collection_ids) @@ -551,9 +575,16 @@ def __iter__(self): assert isinstance(collection, CollectionReference) assert collection.parent == document assert collection.id == collection_id + + expected_result = { + "parent": document._document_path, + "page_size": page_size, + } + if read_time is not None: + expected_result["read_time"] = read_time api_client.list_collection_ids.assert_called_once_with( - request={"parent": document._document_path, "page_size": page_size}, + request=expected_result, metadata=client._rpc_metadata, **kwargs, ) @@ -578,6 +609,11 @@ def test_documentreference_collections_w_retry_timeout(database): _collections_helper(retry=retry, timeout=timeout, database=database) +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_documentreference_collections_w_read_time(database): + _collections_helper(read_time=datetime.now(), database=database) + + @mock.patch("google.cloud.firestore_v1.document.Watch", autospec=True) def test_documentreference_on_snapshot(watch): client = mock.Mock(_database_string="sprinklez", spec=["_database_string"]) diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index f30a4fcdff..9193dcb48f 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import types import mock @@ -42,6 +43,7 @@ def _query_get_helper( timeout=None, database=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers @@ -71,7 +73,7 @@ def _query_get_helper( # Execute the query and check the response. query = make_query(parent) - returned = query.get(**kwargs, explain_options=explain_options) + returned = query.get(**kwargs, explain_options=explain_options, read_time=read_time) assert isinstance(returned, QueryResultsList) assert len(returned) == 1 @@ -97,6 +99,8 @@ def _query_get_helper( } if explain_options: request["explain_options"] = explain_options._to_dict() + if read_time: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -118,6 +122,11 @@ def test_query_get_w_retry_timeout(): _query_get_helper(retry=retry, timeout=timeout) +def test_query_get_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_get_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_get_limit_to_last(database): from google.cloud import firestore @@ -338,6 +347,7 @@ def _query_stream_helper( timeout=None, database=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.stream_generator import StreamGenerator @@ -369,7 +379,9 @@ def _query_stream_helper( # Execute the query and check the response. query = make_query(parent) - get_response = query.stream(**kwargs, explain_options=explain_options) + get_response = query.stream( + **kwargs, explain_options=explain_options, read_time=read_time + ) assert isinstance(get_response, StreamGenerator) returned = list(get_response) @@ -396,6 +408,8 @@ def _query_stream_helper( } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. 
firestore_api.run_query.assert_called_once_with( @@ -417,6 +431,11 @@ def test_query_stream_w_retry_timeout(): _query_stream_helper(retry=retry, timeout=timeout) +def test_query_stream_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_stream_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_stream_with_limit_to_last(database): # Attach the fake GAPIC to a real client. @@ -480,6 +499,57 @@ def test_query_stream_with_transaction(database): ) +@pytest.mark.parametrize("database", [None, "somedb"]) +def test_query_stream_with_transaction_and_read_time(database): + from google.cloud.firestore_v1.stream_generator import StreamGenerator + + # Create a minimal fake GAPIC. + firestore_api = mock.Mock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = make_client(database=database) + client._firestore_api_internal = firestore_api + + # Create a real-ish transaction for this client. + transaction = client.transaction() + txn_id = b"\x00\x00\x01-work-\xf2" + transaction._id = txn_id + + # Create a read_time for this client. + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + + # Make a **real** collection reference as parent. + parent = client.collection("declaration") + + # Add a dummy response to the minimal fake GAPIC. + parent_path, expected_prefix = parent._parent_info() + name = "{}/burger".format(expected_prefix) + data = {"lettuce": b"\xee\x87"} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = iter([response_pb]) + + # Execute the query and check the response. + query = make_query(parent) + get_response = query.stream(transaction=transaction, read_time=read_time) + assert isinstance(get_response, StreamGenerator) + returned = list(get_response) + assert len(returned) == 1 + snapshot = returned[0] + assert snapshot.reference._path == ("declaration", "burger") + assert snapshot.to_dict() == data + + # Verify the mock call. + firestore_api.run_query.assert_called_once_with( + request={ + "parent": parent_path, + "structured_query": query._to_protobuf(), + "transaction": txn_id, + "read_time": read_time, + }, + metadata=client._rpc_metadata, + ) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_query_stream_no_results(database): from google.cloud.firestore_v1.stream_generator import StreamGenerator @@ -690,7 +760,12 @@ def test_query_stream_w_collection_group(database): def _query_stream_w_retriable_exc_helper( - retry=_not_passed, timeout=None, transaction=None, expect_retry=True, database=None + retry=_not_passed, + timeout=None, + transaction=None, + expect_retry=True, + database=None, + read_time=None, ): from google.api_core import exceptions, gapic_v1 @@ -734,7 +809,7 @@ def _stream_w_exception(*_args, **_kw): # Execute the query and check the response. 
query = make_query(parent) - get_response = query.stream(transaction=transaction, **kwargs) + get_response = query.stream(transaction=transaction, read_time=read_time, **kwargs) assert isinstance(get_response, StreamGenerator) if expect_retry: @@ -762,25 +837,32 @@ def _stream_w_exception(*_args, **_kw): expected_transaction_id = transaction.id else: expected_transaction_id = None + + expected_request = { + "parent": parent_path, + "structured_query": query._to_protobuf(), + "transaction": expected_transaction_id, + } + if read_time is not None: + expected_request["read_time"] = read_time assert calls[0] == mock.call( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": expected_transaction_id, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) if expect_retry: new_query = query.start_after(snapshot) + expected_request = { + "parent": parent_path, + "structured_query": new_query._to_protobuf(), + "transaction": None, + } + if read_time is not None: + expected_request["read_time"] = read_time assert calls[1] == mock.call( - request={ - "parent": parent_path, - "structured_query": new_query._to_protobuf(), - "transaction": None, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -804,6 +886,11 @@ def test_query_stream_w_retriable_exc_w_transaction(): _query_stream_w_retriable_exc_helper(transaction=txn) +def test_query_stream_w_retriable_exc_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _query_stream_w_retriable_exc_helper(read_time=read_time) + + def test_query_stream_w_explain_options(): from google.cloud.firestore_v1.query_profile import ExplainOptions @@ -842,7 +929,9 @@ def test_collection_group_constructor_all_descendents_is_false(): _make_collection_group(mock.sentinel.parent, all_descendants=False) -def _collection_group_get_partitions_helper(retry=None, timeout=None, database=None): +def _collection_group_get_partitions_helper( + retry=None, timeout=None, database=None, read_time=None + ): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. @@ -868,7 +957,7 @@ def _collection_group_get_partitions_helper(retry=None, timeout=None, database=N # Execute the query and check the response. 
query = _make_collection_group(parent) - get_response = query.get_partitions(2, **kwargs) + get_response = query.get_partitions(2, read_time=read_time, **kwargs) assert isinstance(get_response, types.GeneratorType) returned = list(get_response) @@ -880,12 +969,15 @@ def _collection_group_get_partitions_helper(retry=None, timeout=None, database=N parent, orders=(query._make_order("__name__", query.ASCENDING),), ) + expected_request = { + "parent": parent_path, + "structured_query": partition_query._to_protobuf(), + "partition_count": 2, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.partition_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": partition_query._to_protobuf(), - "partition_count": 2, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -903,6 +995,11 @@ def test_collection_group_get_partitions_w_retry_timeout(): _collection_group_get_partitions_helper(retry=retry, timeout=timeout) +def test_collection_group_get_partitions_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _collection_group_get_partitions_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_collection_group_get_partitions_w_filter(database): # Make a **real** collection reference as parent. diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index 941e294dbd..fe42932b32 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import mock import pytest @@ -312,13 +313,15 @@ def test_transaction__commit_failure(database): ) -def _transaction_get_all_helper(retry=None, timeout=None): +def _transaction_get_all_helper(retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers client = mock.Mock(spec=["get_all"]) transaction = _make_transaction(client) ref1, ref2 = mock.Mock(), mock.Mock() kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time result = transaction.get_all([ref1, ref2], **kwargs) @@ -342,10 +345,16 @@ def test_transaction_get_all_w_retry_timeout(): _transaction_get_all_helper(retry=retry, timeout=timeout) +def test_transaction_get_all_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _transaction_get_all_helper(read_time=read_time) + + def _transaction_get_w_document_ref_helper( retry=None, timeout=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.document import DocumentReference @@ -357,6 +366,8 @@ def _transaction_get_w_document_ref_helper( if explain_options is not None: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time result = transaction.get(ref, **kwargs) @@ -388,10 +399,17 @@ def test_transaction_get_w_document_ref_w_explain_options(): ) +def test_transaction_get_w_document_ref(): + _transaction_get_w_document_ref_helper( + read_time=datetime.datetime.now(tz=datetime.timezone.utc) + ) + + def _transaction_get_w_query_helper( retry=None, timeout=None, explain_options=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.query import Query @@ -434,6 +452,7 @@ def _transaction_get_w_query_helper( query, **kwargs, 
explain_options=explain_options, + read_time=read_time, ) # Verify the response. @@ -462,6 +481,8 @@ def _transaction_get_w_query_helper( } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -489,6 +510,11 @@ def test_transaction_get_w_query_w_explain_options(): _transaction_get_w_query_helper(explain_options=ExplainOptions(analyze=True)) +def test_transaction_get_w_query_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + _transaction_get_w_query_helper(read_time=read_time) + + @pytest.mark.parametrize("database", [None, "somedb"]) def test_transaction_get_failure(database): client = _make_client(database=database) From 4444ef1afd54caefc5af96bd3aba5c5ef2c58408 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 13 May 2025 18:49:48 +0000 Subject: [PATCH 02/14] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/firestore_v1/aggregation.py | 6 ++++-- google/cloud/firestore_v1/query.py | 10 +++------- tests/unit/v1/test_aggregation.py | 6 ++++-- tests/unit/v1/test_document.py | 4 ++-- tests/unit/v1/test_query.py | 6 +++--- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index 423117f1fd..8d3feb696d 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -108,7 +108,9 @@ def get( return QueryResultsList(result_list, explain_options, explain_metrics) - def _get_stream_iterator(self, transaction, retry, timeout, explain_options=None, read_time=None): + def _get_stream_iterator( + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, kwargs = self._prep_stream( transaction, @@ -261,6 +263,6 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, - read_time=read_time + read_time=read_time, ) return StreamGenerator(inner_generator, explain_options) diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index a7c7034510..066d9baf7b 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -247,15 +247,11 @@ def _chunkify( return def _get_stream_iterator( - self, transaction, retry, timeout, explain_options=None, read_time=None - ): + self, transaction, retry, timeout, explain_options=None, read_time=None + ): """Helper method for :meth:`stream`.""" request, expected_prefix, kwargs = self._prep_stream( - transaction, - retry, - timeout, - explain_options, - read_time + transaction, retry, timeout, explain_options, read_time ) response_iterator = self._client._firestore_api.run_query( diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index 7e870e1139..a6e8461acb 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -477,7 +477,9 @@ def _aggregation_query_get_helper( aggregation_query.count(alias="all") aggregation_result = AggregationResult( - alias="total", value=5, read_time=response_read_time, + alias="total", + value=5, + read_time=response_read_time, ) if explain_options is not None: @@ -704,7 +706,7 @@ 
def _stream_w_exception(*_args, **_kw): expected_transaction_id = transaction.id else: expected_transaction_id = None - + expected_request = { "parent": parent_path, "structured_aggregation_query": aggregation_query._to_protobuf(), diff --git a/tests/unit/v1/test_document.py b/tests/unit/v1/test_document.py index cc9ece9749..3a2a3701e0 100644 --- a/tests/unit/v1/test_document.py +++ b/tests/unit/v1/test_document.py @@ -473,7 +473,7 @@ def WhichOneof(val): expected_transaction_id = transaction_id else: expected_transaction_id = None - + expected_request = { "database": client._database_string, "documents": [document_reference._document_path], @@ -575,7 +575,7 @@ def __iter__(self): assert isinstance(collection, CollectionReference) assert collection.parent == document assert collection.id == collection_id - + expected_result = { "parent": document._document_path, "page_size": page_size, diff --git a/tests/unit/v1/test_query.py b/tests/unit/v1/test_query.py index 9193dcb48f..b8c37cf848 100644 --- a/tests/unit/v1/test_query.py +++ b/tests/unit/v1/test_query.py @@ -837,7 +837,7 @@ def _stream_w_exception(*_args, **_kw): expected_transaction_id = transaction.id else: expected_transaction_id = None - + expected_request = { "parent": parent_path, "structured_query": query._to_protobuf(), @@ -930,8 +930,8 @@ def test_collection_group_constructor_all_descendents_is_false(): def _collection_group_get_partitions_helper( - retry=None, timeout=None, database=None, read_time=None - ): + retry=None, timeout=None, database=None, read_time=None +): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. From 86c89bd6e995f18ff8b3c6073b13f9519db77f4e Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Wed, 21 May 2025 14:02:21 +0000 Subject: [PATCH 03/14] fixed tests + added system tests --- tests/system/test_system.py | 245 ++++++++++++++++++++++++++++++ tests/unit/v1/test_aggregation.py | 9 +- tests/unit/v1/test_transaction.py | 2 +- 3 files changed, 253 insertions(+), 3 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index d82d5113fa..779dc61d65 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -217,6 +217,40 @@ def test_collection_stream_or_get_w_explain_options_analyze_true( assert len(execution_stats.debug_stats) > 0 +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_collections_w_read_time(client, cleanup, database): + first_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + first_document_id = "doc" + UNIQUE_RESOURCE_ID + first_document = client.document(first_collection_id, first_document_id) + # Add to clean-up before API request (in case ``create()`` fails). + cleanup(first_document.delete) + + data = {"status": "new"} + write_result = first_document.create(data) + read_time = write_result.update_time + num_collections = len(list(client.collections())) + + second_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + "-2" + second_document_id = "doc" + UNIQUE_RESOURCE_ID + "-2" + second_document = client.document(second_collection_id, second_document_id) + cleanup(second_document.delete) + second_document.create(data) + + # We're just testing that we added one collection at read_time, not two. 
+ collections = list(client.collections(read_time=read_time)) + assert len(collections) == num_collections + ids = [collection.id for collection in collections] + assert second_collection_id not in ids + assert first_collection_id in ids + + # Test that listing current collections does have the second id. + curr_collections = list(client.collections()) + assert len(curr_collections) == num_collections + 1 + ids = [collection.id for collection in curr_collections] + assert second_collection_id in ids + assert first_collection_id in ids + + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_create_document(client, cleanup, database): now = datetime.datetime.now(tz=datetime.timezone.utc) @@ -708,6 +742,42 @@ def assert_timestamp_less(timestamp_pb1, timestamp_pb2): assert timestamp_pb1 < timestamp_pb2 +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_document_collections_w_read_time(client, cleanup, database): + collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID + document_id = "doc" + UNIQUE_RESOURCE_ID + document = client.document(collection_id, document_id) + # Add to clean-up before API request (in case ``create()`` fails). + cleanup(document.delete) + + data = {"now": firestore.SERVER_TIMESTAMP} + document.create(data) + + original_child_ids = ["child1", "child2"] + read_time = None + + for child_id in original_child_ids: + subcollection = document.collection(child_id) + update_time, subdoc = subcollection.add({"foo": "bar"}) + read_time = ( + update_time if read_time is None or update_time > read_time else read_time + ) + cleanup(subdoc.delete) + + update_time, newdoc = document.collection("child3").add({"foo": "bar"}) + cleanup(newdoc.delete) + assert update_time > read_time + + # Compare the query at read_time to the query at new update time. + original_children = document.collections(read_time=read_time) + assert sorted(child.id for child in original_children) == sorted(original_child_ids) + + original_children = document.collections() + assert sorted(child.id for child in original_children) == sorted( + original_child_ids + ["child3"] + ) + + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_no_document(client, database): document_id = "no_document" + UNIQUE_RESOURCE_ID @@ -1072,6 +1142,31 @@ def test_collection_add(client, cleanup, database): assert set(collection3.list_documents()) == {document_ref5} +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +def test_list_collections_with_read_time(client, cleanup, database): + # TODO(microgen): list_documents is returning a generator, not a list. + # Consider if this is desired. Also, Document isn't hashable. 
+    collection_id = "coll-add" + UNIQUE_RESOURCE_ID
+    collection = client.collection(collection_id)
+
+    assert set(collection.list_documents()) == set()
+
+    data1 = {"foo": "bar"}
+    update_time1, document_ref1 = collection.add(data1)
+    cleanup(document_ref1.delete)
+    assert set(collection.list_documents()) == {document_ref1}
+
+    data2 = {"bar": "baz"}
+    update_time2, document_ref2 = collection.add(data2)
+    cleanup(document_ref2.delete)
+    assert set(collection.list_documents()) == {document_ref1, document_ref2}
+    assert set(collection.list_documents(read_time=update_time1)) == {document_ref1}
+    assert set(collection.list_documents(read_time=update_time2)) == {
+        document_ref1,
+        document_ref2,
+    }
+
+
 @pytest.fixture
 def query_docs(client, database):
     collection_id = "qs" + UNIQUE_RESOURCE_ID
@@ -1450,6 +1545,44 @@ def test_query_stream_or_get_w_explain_options_analyze_false(
         explain_metrics.execution_stats
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_stream_w_read_time(query_docs, cleanup, database):
+    collection, stored, allowed_vals = query_docs
+    num_vals = len(allowed_vals)
+
+    # Find the most recent read_time in the collection.
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    new_data = {
+        "a": 9000,
+        "b": 1,
+        "c": [10000, 1000],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+    _, new_ref = collection.add(new_data)
+    # Add to clean-up.
+    cleanup(new_ref.delete)
+    stored[new_ref.id] = new_data
+
+    # Compare query at read_time to query at current time.
+    query = collection.where(filter=FieldFilter("b", "==", 1))
+    values = {
+        snapshot.id: snapshot.to_dict()
+        for snapshot in query.stream(read_time=read_time)
+    }
+    assert len(values) == num_vals
+    assert new_ref.id not in values
+    for key, value in values.items():
+        assert stored[key] == value
+        assert value["b"] == 1
+        assert value["a"] != 9000
+        assert key != new_ref.id
+
+    new_values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
+    assert len(new_values) == num_vals + 1
+    assert new_ref.id in new_values
+    assert new_values[new_ref.id] == new_data
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_order_dot_key(client, cleanup, database):
     db = client
@@ -1760,6 +1893,7 @@ def test_get_all(client, cleanup, database):
     document3 = client.document(collection_name, "c")
     # Add to clean-up before API requests (in case ``create()`` fails).
     cleanup(document1.delete)
+    cleanup(document2.delete)
     cleanup(document3.delete)
 
     data1 = {"a": {"b": 2, "c": 3}, "d": 4, "e": 0}
@@ -1767,6 +1901,8 @@ def test_get_all(client, cleanup, database):
     data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100}
     write_result3 = document3.create(data3)
 
+    read_time = write_result3.update_time
+
     # 0. Get 3 unique documents, one of which is missing.
     snapshots = list(client.get_all([document1, document2, document3]))
 
@@ -1802,6 +1938,27 @@ def test_get_all(client, cleanup, database):
     restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]}
     check_snapshot(snapshot3, document3, restricted3, write_result3)
 
+    # 3. Use ``read_time`` in ``get_all``
+    new_data = {"a": {"b": 8, "c": 9}, "d": 10, "e": 1010}
+    document1.update(new_data)
+    document2.create(new_data)
+    document3.update(new_data)
+
+    snapshots = list(
+        client.get_all([document1, document2, document3], read_time=read_time)
+    )
+    assert snapshots[0].exists
+    assert snapshots[1].exists
+    assert not snapshots[2].exists
+
+    snapshots = [snapshot for snapshot in snapshots if snapshot.exists]
+    id_attr = operator.attrgetter("id")
+    snapshots.sort(key=id_attr)
+
+    snapshot1, snapshot3 = snapshots
+    check_snapshot(snapshot1, document1, data1, write_result1)
+    check_snapshot(snapshot3, document3, data3, write_result3)
+
 
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_batch(client, cleanup, database):
@@ -3015,6 +3172,48 @@ def test_query_with_or_composite_filter(collection, database):
     assert lt_10 > 0
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+@pytest.mark.parametrize(
+    "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)]
+)
+def test_aggregation_queries_with_read_time(
+    collection, query, cleanup, database, aggregation_type, expected_value
+):
+    """
+    Ensure that all aggregation queries work when read_time is passed into
+    an aggregation query's get() method.
+    """
+    # Find the most recent read_time in the collection.
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    document_data = {
+        "a": 1,
+        "b": 9000,
+        "c": [1, 123123123],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+
+    _, doc_ref = collection.add(document_data)
+    cleanup(doc_ref.delete)
+
+    if aggregation_type == "count":
+        aggregation_query = query.count()
+    elif aggregation_type == "sum":
+        aggregation_query = collection.sum("stats.product")
+    elif aggregation_type == "avg":
+        aggregation_query = collection.avg("stats.product")
+
+    # Check that adding the new document data affected the results of the aggregation queries.
+    new_result = aggregation_query.get()
+    assert len(new_result) == 1
+    for r in new_result[0]:
+        assert r.value != expected_value
+
+    old_result = aggregation_query.get(read_time=read_time)
+    assert len(old_result) == 1
+    for r in old_result[0]:
+        assert r.value == expected_value
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_complex_composite_filter(collection, database):
     field_filter = FieldFilter("b", "==", 0)
@@ -3228,6 +3427,52 @@ def in_transaction(transaction):
     assert inner_fn_ran is True
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_in_transaction_with_read_time(client, cleanup, database):
+    """
+    Test queries with read_time in transactions.
+ """ + collection_id = "doc-create" + UNIQUE_RESOURCE_ID + doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)] + doc_refs = [client.document(collection_id, doc_id) for doc_id in doc_ids] + for doc_ref in doc_refs: + cleanup(doc_ref.delete) + doc_refs[0].create({"a": 1, "b": 2}) + doc_refs[1].create({"a": 1, "b": 1}) + + read_time = max(docref.get().read_time for docref in doc_refs) + doc_refs[2].create({"a": 1, "b": 3}) + + collection = client.collection(collection_id) + query = collection.where(filter=FieldFilter("a", "==", 1)) + + with client.transaction() as transaction: + # should work when transaction is initiated through transactional decorator + @firestore.transactional + def in_transaction(transaction): + global inner_fn_ran + + new_b_values = [ + docs.get("b") for docs in transaction.get(query, read_time=read_time) + ] + assert len(new_b_values) == 2 + assert 1 in new_b_values + assert 2 in new_b_values + assert 3 not in new_b_values + + new_b_values = [docs.get("b") for docs in transaction.get(query)] + assert len(new_b_values) == 3 + assert 1 in new_b_values + assert 2 in new_b_values + assert 3 in new_b_values + + inner_fn_ran = True + + in_transaction(transaction) + # make sure we didn't skip assertions in inner function + assert inner_fn_ran is True + + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_update_w_uuid(client, cleanup, database): """ diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index a6e8461acb..d77dabfc89 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -676,7 +676,7 @@ def _stream_w_exception(*_args, **_kw): query = make_query(parent) aggregation_query = make_aggregation_query(query) - get_response = aggregation_query.stream(transaction=transaction, **kwargs) + get_response = aggregation_query.stream(transaction=transaction, **kwargs, read_time=read_time) assert isinstance(get_response, stream_generator.StreamGenerator) if expect_retry: @@ -728,7 +728,8 @@ def _stream_w_exception(*_args, **_kw): "transaction": None, } if read_time is not None: - expected_request["read_time"] = None + expected_request["read_time"] = read_time + assert calls[1] == mock.call( request=expected_request, metadata=client._rpc_metadata, @@ -746,6 +747,10 @@ def test_aggregation_query_stream_w_retriable_exc_w_retry(): _aggregation_query_stream_w_retriable_exc_helper(retry=retry, expect_retry=False) +def test_aggregation_query_stream_w_retriable_exc_w_read_time(): + _aggregation_query_stream_w_retriable_exc_helper(read_time=datetime.now(tz=timezone.utc)) + + def test_aggregation_query_stream_w_retriable_exc_w_transaction(): from google.cloud.firestore_v1 import transaction diff --git a/tests/unit/v1/test_transaction.py b/tests/unit/v1/test_transaction.py index fe42932b32..2fe215abc9 100644 --- a/tests/unit/v1/test_transaction.py +++ b/tests/unit/v1/test_transaction.py @@ -399,7 +399,7 @@ def test_transaction_get_w_document_ref_w_explain_options(): ) -def test_transaction_get_w_document_ref(): +def test_transaction_get_w_document_ref_w_read_time(): _transaction_get_w_document_ref_helper( read_time=datetime.datetime.now(tz=datetime.timezone.utc) ) From 669464082de0b59c0f5282600d02f015f8f07dd8 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Wed, 21 May 2025 14:05:03 +0000 Subject: [PATCH 04/14] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/unit/v1/test_aggregation.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/unit/v1/test_aggregation.py b/tests/unit/v1/test_aggregation.py index d77dabfc89..767089e986 100644 --- a/tests/unit/v1/test_aggregation.py +++ b/tests/unit/v1/test_aggregation.py @@ -676,7 +676,9 @@ def _stream_w_exception(*_args, **_kw): query = make_query(parent) aggregation_query = make_aggregation_query(query) - get_response = aggregation_query.stream(transaction=transaction, **kwargs, read_time=read_time) + get_response = aggregation_query.stream( + transaction=transaction, **kwargs, read_time=read_time + ) assert isinstance(get_response, stream_generator.StreamGenerator) if expect_retry: @@ -748,7 +750,9 @@ def test_aggregation_query_stream_w_retriable_exc_w_retry(): def test_aggregation_query_stream_w_retriable_exc_w_read_time(): - _aggregation_query_stream_w_retriable_exc_helper(read_time=datetime.now(tz=timezone.utc)) + _aggregation_query_stream_w_retriable_exc_helper( + read_time=datetime.now(tz=timezone.utc) + ) def test_aggregation_query_stream_w_retriable_exc_w_transaction(): From afa1877a71c49429542c3bdee6bd37bb8af062fa Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Wed, 21 May 2025 17:28:21 +0000 Subject: [PATCH 05/14] Removed specific system test assertions --- tests/system/test_system.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 779dc61d65..8dc43a4a7d 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -236,20 +236,19 @@ def test_collections_w_read_time(client, cleanup, database): cleanup(second_document.delete) second_document.create(data) - # We're just testing that we added one collection at read_time, not two. - collections = list(client.collections(read_time=read_time)) - assert len(collections) == num_collections - ids = [collection.id for collection in collections] - assert second_collection_id not in ids - assert first_collection_id in ids - # Test that listing current collections does have the second id. curr_collections = list(client.collections()) - assert len(curr_collections) == num_collections + 1 + assert len(curr_collections) > num_collections ids = [collection.id for collection in curr_collections] assert second_collection_id in ids assert first_collection_id in ids + # We're just testing that we added one collection at read_time, not two. 
+ collections = list(client.collections(read_time=read_time)) + ids = [collection.id for collection in collections] + assert second_collection_id not in ids + assert first_collection_id in ids + @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) def test_create_document(client, cleanup, database): From 5b5804e3187ea6790a9187a9022b176b92bb770e Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 22 May 2025 14:26:05 +0000 Subject: [PATCH 06/14] added system test with python datetimes --- tests/system/test_system.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 8dc43a4a7d..53e8f57205 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1142,7 +1142,10 @@ def test_collection_add(client, cleanup, database): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -def test_list_collections_with_read_time(client, cleanup, database): +@pytest.mark.parametrize( + "use_python_datetime", [True, False] +) +def test_list_collections_with_read_time(client, cleanup, database, use_python_datetime): # TODO(microgen): list_documents is returning a generator, not a list. # Consider if this is desired. Also, Document isn't hashable. collection_id = "coll-add" + UNIQUE_RESOURCE_ID @@ -1152,11 +1155,15 @@ def test_list_collections_with_read_time(client, cleanup, database): data1 = {"foo": "bar"} update_time1, document_ref1 = collection.add(data1) + if use_python_datetime: + update_time1 = datetime.datetime.now(tz=datetime.timezone.utc) cleanup(document_ref1.delete) assert set(collection.list_documents()) == {document_ref1} data2 = {"bar": "baz"} update_time2, document_ref2 = collection.add(data2) + if use_python_datetime: + update_time2 = datetime.datetime.now(tz=datetime.timezone.utc) cleanup(document_ref2.delete) assert set(collection.list_documents()) == {document_ref1, document_ref2} assert set(collection.list_documents(read_time=update_time1)) == {document_ref1} From 2224d5a2880c2988971ccb9728af18f623b15a25 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 22 May 2025 14:29:40 +0000 Subject: [PATCH 07/14] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- tests/system/test_system.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 53e8f57205..d10196ad16 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1142,10 +1142,10 @@ def test_collection_add(client, cleanup, database): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -@pytest.mark.parametrize( - "use_python_datetime", [True, False] -) -def test_list_collections_with_read_time(client, cleanup, database, use_python_datetime): +@pytest.mark.parametrize("use_python_datetime", [True, False]) +def test_list_collections_with_read_time( + client, cleanup, database, use_python_datetime +): # TODO(microgen): list_documents is returning a generator, not a list. # Consider if this is desired. Also, Document isn't hashable. 
collection_id = "coll-add" + UNIQUE_RESOURCE_ID From 8e1baa592d3617db65337586b645ea12145e382b Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 22 May 2025 15:03:02 +0000 Subject: [PATCH 08/14] revised type hints --- google/cloud/firestore_v1/base_client.py | 8 ++++---- google/cloud/firestore_v1/base_document.py | 8 ++++---- google/cloud/firestore_v1/base_query.py | 2 +- google/cloud/firestore_v1/base_transaction.py | 2 +- google/cloud/firestore_v1/client.py | 4 ++-- google/cloud/firestore_v1/document.py | 4 ++-- google/cloud/firestore_v1/transaction.py | 2 +- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/google/cloud/firestore_v1/base_client.py b/google/cloud/firestore_v1/base_client.py index c8504f39db..acbd148fbb 100644 --- a/google/cloud/firestore_v1/base_client.py +++ b/google/cloud/firestore_v1/base_client.py @@ -438,7 +438,7 @@ def _prep_get_all( transaction: BaseTransaction | None = None, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Tuple[dict, dict, dict]: """Shared setup for async/sync :meth:`get_all`.""" document_paths, reference_map = _reference_info(references) @@ -463,7 +463,7 @@ def get_all( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Union[ AsyncGenerator[DocumentSnapshot, Any], Generator[DocumentSnapshot, Any, Any] ]: @@ -473,7 +473,7 @@ def _prep_collections( self, retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" request = { @@ -490,7 +490,7 @@ def collections( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index 921c3aab7e..e5f54ef280 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -292,7 +292,7 @@ def _prep_batch_get( transaction=None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`get`.""" if isinstance(field_paths, str): @@ -322,7 +322,7 @@ def get( retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> "DocumentSnapshot" | Awaitable["DocumentSnapshot"]: raise NotImplementedError @@ -331,7 +331,7 @@ def _prep_collections( page_size: int | None = None, retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = None, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Tuple[dict, dict]: """Shared setup for async/sync :meth:`collections`.""" request = { @@ -350,7 +350,7 @@ def collections( retry: retries.Retry | retries.AsyncRetry | None | object = None, timeout: float | None = 
None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ): raise NotImplementedError diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 31238da3d1..f39d5b5242 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -1433,7 +1433,7 @@ def _prep_get_partitions( partition_count, retry: retries.Retry | object | None = None, timeout: float | None = None, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Tuple[dict, dict]: self._validate_partition_query() parent_path, expected_prefix = self._parent._parent_info() diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py index 303131312b..5e1fb74424 100644 --- a/google/cloud/firestore_v1/base_transaction.py +++ b/google/cloud/firestore_v1/base_transaction.py @@ -151,7 +151,7 @@ def get_all( retry: retries.Retry | retries.AsyncRetry | object | None = None, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> ( Generator[DocumentSnapshot, Any, None] | Coroutine[Any, Any, AsyncGenerator[DocumentSnapshot, Any]] diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index ec394c9206..f98e329a29 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -208,7 +208,7 @@ def get_all( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieve a batch of documents. @@ -270,7 +270,7 @@ def collections( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Generator[Any, Any, None]: """List top-level collections of the client's database. diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index c8248b9b89..72ac85e9d3 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -366,7 +366,7 @@ def get( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> DocumentSnapshot: """Retrieve a snapshot of the current document. @@ -443,7 +443,7 @@ def collections( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Generator[Any, Any, None]: """List subcollections of the current document. diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index 76c50499c1..8efafa96cc 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -157,7 +157,7 @@ def get_all( retry: retries.Retry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, *, - read_time: Optional[datetime.datetime] = None, + read_time: datetime.datetime | None = None, ) -> Generator[DocumentSnapshot, Any, None]: """Retrieves multiple documents from Firestore. 
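
A minimal usage sketch of the synchronous ``read_time`` surface assembled in the commits above. The ``cities`` collection, the ``population`` field, and the ten-minute offset are invented for illustration, and the timestamp has to stay inside the window the docstrings describe (the past hour, or a whole-minute timestamp within the past 7 days when Point-in-Time Recovery is enabled):

    import datetime

    from google.cloud import firestore
    from google.cloud.firestore_v1.base_query import FieldFilter

    client = firestore.Client()
    # Naive datetimes are treated as UTC; pinning the timezone avoids ambiguity.
    read_time = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(
        minutes=10
    )

    # Point-in-time read of a single document.
    snapshot = client.document("cities/tokyo").get(read_time=read_time)

    # Point-in-time query over a collection.
    query = client.collection("cities").where(filter=FieldFilter("population", ">", 0))
    for snap in query.stream(read_time=read_time):
        print(snap.id, snap.to_dict())

    # Point-in-time listing of the client's top-level collections.
    print([c.id for c in client.collections(read_time=read_time)])
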
From cd359bb8826aa328a5fa8dd9e585832cb203be6e Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Thu, 22 May 2025 17:18:13 +0000 Subject: [PATCH 09/14] linting --- google/cloud/firestore_v1/document.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/firestore_v1/document.py b/google/cloud/firestore_v1/document.py index 72ac85e9d3..4e0132e492 100644 --- a/google/cloud/firestore_v1/document.py +++ b/google/cloud/firestore_v1/document.py @@ -16,7 +16,7 @@ from __future__ import annotations import datetime import logging -from typing import Any, Callable, Generator, Iterable, Optional +from typing import Any, Callable, Generator, Iterable from google.api_core import gapic_v1 from google.api_core import retry as retries From b835963ffb14f381555c81b26ef45e7a42881946 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 27 May 2025 16:02:58 -0400 Subject: [PATCH 10/14] feat: Added read_time as a parameter to various calls (async classes) (#1059) * feat: Added read_time as a parameter to various calls (async classes) * used TYPE_CHECKING; fixed unit tests * linting + fixing cover * final linting --- .../cloud/firestore_v1/async_aggregation.py | 19 ++ google/cloud/firestore_v1/async_client.py | 22 +- google/cloud/firestore_v1/async_collection.py | 26 +- google/cloud/firestore_v1/async_document.py | 18 +- google/cloud/firestore_v1/async_query.py | 31 ++- .../cloud/firestore_v1/async_transaction.py | 17 ++ tests/system/test_system.py | 9 +- tests/system/test_system_async.py | 255 ++++++++++++++++++ tests/unit/v1/test_async_aggregation.py | 73 ++++- tests/unit/v1/test_async_client.py | 55 +++- tests/unit/v1/test_async_collection.py | 66 ++++- tests/unit/v1/test_async_document.py | 61 ++++- tests/unit/v1/test_async_query.py | 105 +++++++- tests/unit/v1/test_async_transaction.py | 44 ++- 14 files changed, 723 insertions(+), 78 deletions(-) diff --git a/google/cloud/firestore_v1/async_aggregation.py b/google/cloud/firestore_v1/async_aggregation.py index 3f3a1b9f43..e273f514ab 100644 --- a/google/cloud/firestore_v1/async_aggregation.py +++ b/google/cloud/firestore_v1/async_aggregation.py @@ -37,6 +37,7 @@ from google.cloud.firestore_v1.base_aggregation import AggregationResult from google.cloud.firestore_v1.query_profile import ExplainMetrics, ExplainOptions import google.cloud.firestore_v1.types.query_profile as query_profile_pb + import datetime class AsyncAggregationQuery(BaseAggregationQuery): @@ -55,6 +56,7 @@ async def get( timeout: float | None = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[List[AggregationResult]]: """Runs the aggregation query. @@ -75,6 +77,10 @@ async def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: QueryResultsList[List[AggregationResult]]: The aggregation query results. 
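
Before the remaining async hunks, a hedged sketch of how the ``read_time`` parameter added to the async aggregation ``get`` above might be called. The ``orders`` collection and the ``total`` alias are invented; the result shape (a list whose first entry holds the ``AggregationResult`` objects) follows the system tests earlier in this series:

    import asyncio
    import datetime

    from google.cloud import firestore

    async def count_orders_at(read_time: datetime.datetime) -> None:
        client = firestore.AsyncClient()
        aggregation = client.collection("orders").count(alias="total")
        # Passing read_time pins the aggregation to an earlier snapshot.
        results = await aggregation.get(read_time=read_time)
        for result in results[0]:
            print(result.alias, result.value)

    asyncio.run(count_orders_at(datetime.datetime.now(tz=datetime.timezone.utc)))
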
@@ -87,6 +93,7 @@ async def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) try: result = [aggregation async for aggregation in stream_result] @@ -106,6 +113,7 @@ async def _make_stream( retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncGenerator[List[AggregationResult] | query_profile_pb.ExplainMetrics, Any]: """Internal method for stream(). Runs the aggregation query. @@ -130,6 +138,10 @@ async def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: List[AggregationResult] | query_profile_pb.ExplainMetrics: @@ -143,6 +155,7 @@ async def _make_stream( retry, timeout, explain_options, + read_time, ) response_iterator = await self._client._firestore_api.run_aggregation_query( @@ -167,6 +180,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncStreamGenerator[List[AggregationResult]]: """Runs the aggregation query. @@ -190,6 +204,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `AsyncStreamGenerator[List[AggregationResult]]`: @@ -201,5 +219,6 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) return AsyncStreamGenerator(inner_generator, explain_options) diff --git a/google/cloud/firestore_v1/async_client.py b/google/cloud/firestore_v1/async_client.py index 275bcb9b61..15b31af314 100644 --- a/google/cloud/firestore_v1/async_client.py +++ b/google/cloud/firestore_v1/async_client.py @@ -48,8 +48,10 @@ grpc_asyncio as firestore_grpc_transport, ) -if TYPE_CHECKING: - from google.cloud.firestore_v1.bulk_writer import BulkWriter # pragma: NO COVER +if TYPE_CHECKING: # pragma: NO COVER + import datetime + + from google.cloud.firestore_v1.bulk_writer import BulkWriter class AsyncClient(BaseClient): @@ -227,6 +229,8 @@ async def get_all( transaction: AsyncTransaction | None = None, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> AsyncGenerator[DocumentSnapshot, Any]: """Retrieve a batch of documents. @@ -261,13 +265,17 @@ async def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. 
+ read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: .DocumentSnapshot: The next document snapshot that fulfills the query, or :data:`None` if the document does not exist. """ request, reference_map, kwargs = self._prep_get_all( - references, field_paths, transaction, retry, timeout + references, field_paths, transaction, retry, timeout, read_time ) response_iterator = await self._firestore_api.batch_get_documents( @@ -283,6 +291,8 @@ async def collections( self, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> AsyncGenerator[AsyncCollectionReference, Any]: """List top-level collections of the client's database. @@ -291,12 +301,16 @@ async def collections( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`]: iterator of subcollections of the current document. """ - request, kwargs = self._prep_collections(retry, timeout) + request, kwargs = self._prep_collections(retry, timeout, read_time) iterator = await self._firestore_api.list_collection_ids( request=request, metadata=self._rpc_metadata, diff --git a/google/cloud/firestore_v1/async_collection.py b/google/cloud/firestore_v1/async_collection.py index 8c832b8f4c..1b71372dd2 100644 --- a/google/cloud/firestore_v1/async_collection.py +++ b/google/cloud/firestore_v1/async_collection.py @@ -34,6 +34,8 @@ from google.cloud.firestore_v1.document import DocumentReference if TYPE_CHECKING: # pragma: NO COVER + import datetime + from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator from google.cloud.firestore_v1.base_document import DocumentSnapshot from google.cloud.firestore_v1.query_profile import ExplainOptions @@ -162,6 +164,8 @@ async def list_documents( page_size: int | None = None, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> AsyncGenerator[DocumentReference, None]: """List all subdocuments of the current collection. @@ -173,6 +177,10 @@ async def list_documents( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. 
Returns: Sequence[:class:`~google.cloud.firestore_v1.collection.DocumentReference`]: @@ -180,7 +188,9 @@ async def list_documents( collection does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_list_documents(page_size, retry, timeout) + request, kwargs = self._prep_list_documents( + page_size, retry, timeout, read_time + ) iterator = await self._client._firestore_api.list_documents( request=request, @@ -197,6 +207,7 @@ async def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in this collection. @@ -216,6 +227,10 @@ async def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. read-after-write is not allowed). @@ -227,6 +242,8 @@ async def get( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options is not None: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return await query.get(transaction=transaction, **kwargs) @@ -237,6 +254,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncStreamGenerator[DocumentSnapshot]: """Read the documents in this collection. @@ -268,6 +286,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: `AsyncStreamGenerator[DocumentSnapshot]`: A generator of the query @@ -276,5 +298,7 @@ def stream( query, kwargs = self._prep_get_or_stream(retry, timeout) if explain_options: kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time return query.stream(transaction=transaction, **kwargs) diff --git a/google/cloud/firestore_v1/async_document.py b/google/cloud/firestore_v1/async_document.py index 78c71b33fc..c3ebfbe0cc 100644 --- a/google/cloud/firestore_v1/async_document.py +++ b/google/cloud/firestore_v1/async_document.py @@ -329,6 +329,8 @@ async def get( transaction=None, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> DocumentSnapshot: """Retrieve a snapshot of the current document. @@ -351,6 +353,10 @@ async def get( should be retried. Defaults to a system-specified policy. 
timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: :class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot`: @@ -362,7 +368,9 @@ async def get( """ from google.cloud.firestore_v1.base_client import _parse_batch_get - request, kwargs = self._prep_batch_get(field_paths, transaction, retry, timeout) + request, kwargs = self._prep_batch_get( + field_paths, transaction, retry, timeout, read_time + ) response_iter = await self._client._firestore_api.batch_get_documents( request=request, @@ -397,6 +405,8 @@ async def collections( page_size: int | None = None, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> AsyncGenerator: """List subcollections of the current document. @@ -408,6 +418,10 @@ async def collections( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Returns: Sequence[:class:`~google.cloud.firestore_v1.async_collection.AsyncCollectionReference`]: @@ -415,7 +429,7 @@ async def collections( document does not exist at the time of `snapshot`, the iterator will be empty """ - request, kwargs = self._prep_collections(page_size, retry, timeout) + request, kwargs = self._prep_collections(page_size, retry, timeout, read_time) iterator = await self._client._firestore_api.list_collection_ids( request=request, diff --git a/google/cloud/firestore_v1/async_query.py b/google/cloud/firestore_v1/async_query.py index d4fd45fa46..98de75bd63 100644 --- a/google/cloud/firestore_v1/async_query.py +++ b/google/cloud/firestore_v1/async_query.py @@ -40,6 +40,8 @@ from google.cloud.firestore_v1.query_results import QueryResultsList if TYPE_CHECKING: # pragma: NO COVER + import datetime + # Types needed only for Type Hints from google.cloud.firestore_v1.async_transaction import AsyncTransaction from google.cloud.firestore_v1.base_document import DocumentSnapshot @@ -182,6 +184,7 @@ async def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> QueryResultsList[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -201,6 +204,10 @@ async def get( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. 
For the most accurate results, use UTC timezone. If a ``transaction`` is used and it already has write operations added, this method cannot be used (i.e. read-after-write is not @@ -230,6 +237,7 @@ async def get( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) try: result_list = [d async for d in result] @@ -336,6 +344,7 @@ async def _make_stream( retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncGenerator[DocumentSnapshot | query_profile_pb.ExplainMetrics, Any]: """Internal method for stream(). Read the documents in the collection that match this query. @@ -368,6 +377,10 @@ async def _make_stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Yields: [:class:`~google.cloud.firestore_v1.base_document.DocumentSnapshot` \ @@ -381,6 +394,7 @@ async def _make_stream( retry, timeout, explain_options, + read_time, ) response_iterator = await self._client._firestore_api.run_query( @@ -412,6 +426,7 @@ def stream( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncStreamGenerator[DocumentSnapshot]: """Read the documents in the collection that match this query. @@ -443,6 +458,10 @@ def stream( (Optional[:class:`~google.cloud.firestore_v1.query_profile.ExplainOptions`]): Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. Returns: `AsyncStreamGenerator[DocumentSnapshot]`: @@ -453,6 +472,7 @@ def stream( retry=retry, timeout=timeout, explain_options=explain_options, + read_time=read_time, ) return AsyncStreamGenerator(inner_generator, explain_options) @@ -514,6 +534,8 @@ async def get_partitions( partition_count, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: Optional[datetime.datetime] = None, ) -> AsyncGenerator[QueryPartition, None]: """Partition a query for parallelization. @@ -529,8 +551,15 @@ async def get_partitions( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a microsecond precision timestamp within the past one hour, or + if Point-in-Time Recovery is enabled, can additionally be a whole minute timestamp + within the past 7 days. For the most accurate results, use UTC timezone. 
""" - request, kwargs = self._prep_get_partitions(partition_count, retry, timeout) + request, kwargs = self._prep_get_partitions( + partition_count, retry, timeout, read_time + ) + pager = await self._client._firestore_api.partition_query( request=request, metadata=self._client._rpc_metadata, diff --git a/google/cloud/firestore_v1/async_transaction.py b/google/cloud/firestore_v1/async_transaction.py index 038710929b..be8668cd62 100644 --- a/google/cloud/firestore_v1/async_transaction.py +++ b/google/cloud/firestore_v1/async_transaction.py @@ -36,6 +36,8 @@ # Types needed only for Type Hints if TYPE_CHECKING: # pragma: NO COVER + import datetime + from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator from google.cloud.firestore_v1.base_document import DocumentSnapshot from google.cloud.firestore_v1.query_profile import ExplainOptions @@ -154,6 +156,8 @@ async def get_all( references: list, retry: retries.AsyncRetry | object | None = gapic_v1.method.DEFAULT, timeout: float | None = None, + *, + read_time: datetime.datetime | None = None, ) -> AsyncGenerator[DocumentSnapshot, Any]: """Retrieves multiple documents from Firestore. @@ -164,12 +168,18 @@ async def get_all( should be retried. Defaults to a system-specified policy. timeout (float): The timeout for this request. Defaults to a system-specified value. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: .DocumentSnapshot: The next document snapshot that fulfills the query, or :data:`None` if the document does not exist. """ kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time return await self._client.get_all(references, transaction=self, **kwargs) async def get( @@ -179,6 +189,7 @@ async def get( timeout: Optional[float] = None, *, explain_options: Optional[ExplainOptions] = None, + read_time: Optional[datetime.datetime] = None, ) -> AsyncGenerator[DocumentSnapshot, Any] | AsyncStreamGenerator[DocumentSnapshot]: """ Retrieve a document or a query result from the database. @@ -195,6 +206,10 @@ async def get( Options to enable query profiling for this query. When set, explain_metrics will be available on the returned generator. Can only be used when running a query, not a document reference. + read_time (Optional[datetime.datetime]): If set, reads documents as they were at the given + time. This must be a timestamp within the past one hour, or if Point-in-Time Recovery + is enabled, can additionally be a whole minute timestamp within the past 7 days. If no + timezone is specified in the :class:`datetime.datetime` object, it is assumed to be UTC. Yields: DocumentSnapshot: The next document snapshot that fulfills the query, @@ -206,6 +221,8 @@ async def get( reference. 
""" kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time if isinstance(ref_or_query, AsyncDocumentReference): if explain_options is not None: raise ValueError( diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 368b463df9..c66340de1e 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1142,10 +1142,7 @@ def test_collection_add(client, cleanup, database): @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) -@pytest.mark.parametrize("use_python_datetime", [True, False]) -def test_list_collections_with_read_time( - client, cleanup, database, use_python_datetime -): +def test_list_collections_with_read_time(client, cleanup, database): # TODO(microgen): list_documents is returning a generator, not a list. # Consider if this is desired. Also, Document isn't hashable. collection_id = "coll-add" + UNIQUE_RESOURCE_ID @@ -1155,15 +1152,11 @@ def test_list_collections_with_read_time( data1 = {"foo": "bar"} update_time1, document_ref1 = collection.add(data1) - if use_python_datetime: - update_time1 = datetime.datetime.now(tz=datetime.timezone.utc) cleanup(document_ref1.delete) assert set(collection.list_documents()) == {document_ref1} data2 = {"bar": "baz"} update_time2, document_ref2 = collection.add(data2) - if use_python_datetime: - update_time2 = datetime.datetime.now(tz=datetime.timezone.utc) cleanup(document_ref2.delete) assert set(collection.list_documents()) == {document_ref1, document_ref2} assert set(collection.list_documents(read_time=update_time1)) == {document_ref1} diff --git a/tests/system/test_system_async.py b/tests/system/test_system_async.py index 200be7d8ab..ed9984954d 100644 --- a/tests/system/test_system_async.py +++ b/tests/system/test_system_async.py @@ -234,6 +234,41 @@ async def test_create_document(client, cleanup, database): assert stored_data == expected_data +@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True) +async def test_collections_w_read_time(client, cleanup, database): + first_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + first_document_id = "doc" + UNIQUE_RESOURCE_ID + first_document = client.document(first_collection_id, first_document_id) + # Add to clean-up before API request (in case ``create()`` fails). + cleanup(first_document.delete) + + data = {"status": "new"} + write_result = await first_document.create(data) + read_time = write_result.update_time + num_collections = len([x async for x in client.collections(retry=RETRIES)]) + + second_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + "-2" + second_document_id = "doc" + UNIQUE_RESOURCE_ID + "-2" + second_document = client.document(second_collection_id, second_document_id) + cleanup(second_document.delete) + await second_document.create(data) + + # Test that listing current collections does have the second id. + curr_collections = [x async for x in client.collections(retry=RETRIES)] + assert len(curr_collections) > num_collections + ids = [collection.id for collection in curr_collections] + assert second_collection_id in ids + assert first_collection_id in ids + + # We're just testing that we added one collection at read_time, not two. 
+    collections = [
+        x async for x in client.collections(retry=RETRIES, read_time=read_time)
+    ]
+    ids = [collection.id for collection in collections]
+    assert second_collection_id not in ids
+    assert first_collection_id in ids
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 async def test_create_document_w_subcollection(client, cleanup, database):
     collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID
@@ -260,6 +295,42 @@ def assert_timestamp_less(timestamp_pb1, timestamp_pb2):
     assert timestamp_pb1 < timestamp_pb2
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+async def test_document_collections_w_read_time(client, cleanup, database):
+    collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID
+    document_id = "doc" + UNIQUE_RESOURCE_ID
+    document = client.document(collection_id, document_id)
+    # Add to clean-up before API request (in case ``create()`` fails).
+    cleanup(document.delete)
+
+    data = {"now": firestore.SERVER_TIMESTAMP}
+    document.create(data)
+
+    original_child_ids = ["child1", "child2"]
+    read_time = None
+
+    for child_id in original_child_ids:
+        subcollection = document.collection(child_id)
+        update_time, subdoc = await subcollection.add({"foo": "bar"})
+        read_time = (
+            update_time if read_time is None or update_time > read_time else read_time
+        )
+        cleanup(subdoc.delete)
+
+    update_time, newdoc = await document.collection("child3").add({"foo": "bar"})
+    cleanup(newdoc.delete)
+    assert update_time > read_time
+
+    # Compare the query at read_time to the query at new update time.
+    original_children = [doc async for doc in document.collections(read_time=read_time)]
+    assert sorted(child.id for child in original_children) == sorted(original_child_ids)
+
+    current_children = [doc async for doc in document.collections()]
+    assert sorted(child.id for child in current_children) == sorted(
+        original_child_ids + ["child3"]
+    )
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 async def test_no_document(client, database):
     document_id = "no_document" + UNIQUE_RESOURCE_ID
@@ -1062,6 +1133,38 @@ async def test_collection_add(client, cleanup, database):
     assert set([i async for i in collection3.list_documents()]) == {document_ref5}
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+async def test_list_collections_with_read_time(client, cleanup, database):
+    # TODO(microgen): list_documents is returning a generator, not a list.
+    # Consider if this is desired. Also, Document isn't hashable. 
+    collection_id = "coll-add" + UNIQUE_RESOURCE_ID
+    collection = client.collection(collection_id)
+
+    assert set([i async for i in collection.list_documents()]) == set()
+
+    data1 = {"foo": "bar"}
+    update_time1, document_ref1 = await collection.add(data1)
+    cleanup(document_ref1.delete)
+    assert set([i async for i in collection.list_documents()]) == {document_ref1}
+
+    data2 = {"bar": "baz"}
+    update_time2, document_ref2 = await collection.add(data2)
+    cleanup(document_ref2.delete)
+    assert set([i async for i in collection.list_documents()]) == {
+        document_ref1,
+        document_ref2,
+    }
+    assert set(
+        [i async for i in collection.list_documents(read_time=update_time1)]
+    ) == {document_ref1}
+    assert set(
+        [i async for i in collection.list_documents(read_time=update_time2)]
+    ) == {
+        document_ref1,
+        document_ref2,
+    }
+
+
 @pytest_asyncio.fixture
 async def query_docs(client):
     collection_id = "qs" + UNIQUE_RESOURCE_ID
@@ -1389,6 +1492,46 @@ async def test_query_stream_or_get_w_explain_options_analyze_false(
     _verify_explain_metrics_analyze_false(explain_metrics)
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+async def test_query_stream_w_read_time(query_docs, cleanup, database):
+    collection, stored, allowed_vals = query_docs
+    num_vals = len(allowed_vals)
+
+    # Find the most recent read_time in collections
+    read_time = max(
+        [(await docref.get()).read_time async for docref in collection.list_documents()]
+    )
+    new_data = {
+        "a": 9000,
+        "b": 1,
+        "c": [10000, 1000],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+    _, new_ref = await collection.add(new_data)
+    # Add to clean-up.
+    cleanup(new_ref.delete)
+    stored[new_ref.id] = new_data
+
+    # Compare query at read_time to query at current time.
+    query = collection.where(filter=FieldFilter("b", "==", 1))
+    values = {
+        snapshot.id: snapshot.to_dict()
+        async for snapshot in query.stream(read_time=read_time)
+    }
+    assert len(values) == num_vals
+    assert new_ref.id not in values
+    for key, value in values.items():
+        assert stored[key] == value
+        assert value["b"] == 1
+        assert value["a"] != 9000
+        assert key != new_ref.id
+
+    new_values = {snapshot.id: snapshot.to_dict() async for snapshot in query.stream()}
+    assert len(new_values) == num_vals + 1
+    assert new_ref.id in new_values
+    assert new_values[new_ref.id] == new_data
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 async def test_query_with_order_dot_key(client, cleanup, database):
     db = client
@@ -1853,6 +1996,8 @@ async def test_get_all(client, cleanup, database):
     data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100}
     write_result3 = await document3.create(data3)
 
+    read_time = write_result3.update_time
+
     # 0. Get 3 unique documents, one of which is missing.
     snapshots = [i async for i in client.get_all([document1, document2, document3])]
 
@@ -1891,6 +2036,22 @@ async def test_get_all(client, cleanup, database):
     restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]}
     check_snapshot(snapshot3, document3, restricted3, write_result3)
 
+    # 3. 
Use ``read_time`` in ``get_all``
+    new_data = {"a": {"b": 8, "c": 9}, "d": 10, "e": 1010}
+    await document1.update(new_data)
+    await document2.create(new_data)
+    await document3.update(new_data)
+
+    snapshots = [
+        i
+        async for i in client.get_all(
+            [document1, document2, document3], read_time=read_time
+        )
+    ]
+    assert snapshots[0].exists
+    assert snapshots[1].exists
+    assert not snapshots[2].exists
+
 
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 async def test_live_bulk_writer(client, cleanup, database):
@@ -2765,6 +2926,50 @@ async def test_async_avg_query_stream_w_explain_options_analyze_false(
         explain_metrics.execution_stats
 
 
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+@pytest.mark.parametrize(
+    "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)]
+)
+async def test_aggregation_queries_with_read_time(
+    collection, async_query, cleanup, database, aggregation_type, expected_value
+):
+    """
+    Ensure that all aggregation queries work when read_time is passed into
+    an aggregation query's ``get()`` method.
+    """
+    # Find the most recent read_time in collections
+    read_time = max(
+        [(await docref.get()).read_time async for docref in collection.list_documents()]
+    )
+    document_data = {
+        "a": 1,
+        "b": 9000,
+        "c": [1, 123123123],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+
+    _, doc_ref = await collection.add(document_data)
+    cleanup(doc_ref.delete)
+
+    if aggregation_type == "count":
+        aggregation_query = async_query.count()
+    elif aggregation_type == "sum":
+        aggregation_query = collection.sum("stats.product")
+    elif aggregation_type == "avg":
+        aggregation_query = collection.avg("stats.product")
+
+    # Check that adding the new document data affected the results of the aggregation queries.
+    new_result = await aggregation_query.get()
+    assert len(new_result) == 1
+    for r in new_result[0]:
+        assert r.value != expected_value
+
+    old_result = await aggregation_query.get(read_time=read_time)
+    assert len(old_result) == 1
+    for r in old_result[0]:
+        assert r.value == expected_value
+
+
 @firestore.async_transactional
 async def create_in_transaction_helper(
     transaction, client, collection_id, cleanup, database
@@ -3176,3 +3381,53 @@ async def in_transaction(transaction):
 
     # make sure we didn't skip assertions in inner function
     assert inner_fn_ran is True
+
+
+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+async def test_query_in_transaction_with_read_time(client, cleanup, database):
+    """
+    Test that queries run inside transactions respect ``read_time``. 
+    """
+    collection_id = "doc-create" + UNIQUE_RESOURCE_ID
+    doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)]
+    doc_refs = [client.document(collection_id, doc_id) for doc_id in doc_ids]
+    for doc_ref in doc_refs:
+        cleanup(doc_ref.delete)
+    await doc_refs[0].create({"a": 1, "b": 2})
+    await doc_refs[1].create({"a": 1, "b": 1})
+
+    read_time = max([(await docref.get()).read_time for docref in doc_refs])
+    await doc_refs[2].create({"a": 1, "b": 3})
+
+    collection = client.collection(collection_id)
+    query = collection.where(filter=FieldFilter("a", "==", 1))
+
+    # should work when transaction is initiated through transactional decorator
+    async with client.transaction() as transaction:

+        @firestore.async_transactional
+        async def in_transaction(transaction):
+            global inner_fn_ran
+
+            new_b_values = [
+                docs.get("b")
+                async for docs in await transaction.get(query, read_time=read_time)
+            ]
+            assert len(new_b_values) == 2
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 not in new_b_values
+
+            new_b_values = [
+                docs.get("b") async for docs in await transaction.get(query)
+            ]
+            assert len(new_b_values) == 3
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 in new_b_values
+
+            inner_fn_ran = True
+
+        await in_transaction(transaction)
+        # make sure we didn't skip assertions in inner function
+        assert inner_fn_ran is True
diff --git a/tests/unit/v1/test_async_aggregation.py b/tests/unit/v1/test_async_aggregation.py
index 6254c4c87f..9140f53e81 100644
--- a/tests/unit/v1/test_async_aggregation.py
+++ b/tests/unit/v1/test_async_aggregation.py
@@ -321,9 +321,39 @@ def test_async_aggregation_query_prep_stream_with_explain_options():
     assert kwargs == {"retry": None}
 
 
+def test_async_aggregation_query_prep_stream_with_read_time():
+    client = make_async_client()
+    parent = client.collection("dee")
+    query = make_async_query(parent)
+    aggregation_query = make_async_aggregation_query(query)
+
+    aggregation_query.count(alias="all")
+    aggregation_query.sum("someref", alias="sumall")
+    aggregation_query.avg("anotherref", alias="avgall")
+
+    # use the current time as the read_time for the request
+    read_time = datetime.now()
+
+    request, kwargs = aggregation_query._prep_stream(read_time=read_time)
+
+    parent_path, _ = parent._parent_info()
+    expected_request = {
+        "parent": parent_path,
+        "structured_aggregation_query": aggregation_query._to_protobuf(),
+        "transaction": None,
+        "read_time": read_time,
+    }
+    assert request == expected_request
+    assert kwargs == {"retry": None}
+
+
 @pytest.mark.asyncio
 async def _async_aggregation_query_get_helper(
-    retry=None, timeout=None, read_time=None, explain_options=None
+    retry=None,
+    timeout=None,
+    explain_options=None,
+    response_read_time=None,
+    query_read_time=None,
 ):
     from google.cloud._helpers import _datetime_to_pb_timestamp
 
@@ -342,7 +372,11 @@ async def _async_aggregation_query_get_helper(
     aggregation_query = make_async_aggregation_query(query)
     aggregation_query.count(alias="all")
 
-    aggregation_result = AggregationResult(alias="total", value=5, read_time=read_time)
+    aggregation_result = AggregationResult(
+        alias="total",
+        value=5,
+        read_time=response_read_time,
+    )
 
     if explain_options is not None:
         explain_metrics = {"execution_stats": {"results_returned": 1}}
@@ -351,14 +385,18 @@ async def _async_aggregation_query_get_helper(
 
     response_pb = make_aggregation_query_response(
         [aggregation_result],
-        read_time=read_time,
+        read_time=response_read_time,
         explain_metrics=explain_metrics,
     )
     firestore_api.run_aggregation_query.return_value = 
AsyncIter([response_pb]) kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. - returned = await aggregation_query.get(**kwargs, explain_options=explain_options) + returned = await aggregation_query.get( + **kwargs, + explain_options=explain_options, + read_time=query_read_time, + ) assert isinstance(returned, QueryResultsList) assert len(returned) == 1 @@ -366,9 +404,9 @@ async def _async_aggregation_query_get_helper( for r in result: assert r.alias == aggregation_result.alias assert r.value == aggregation_result.value - if read_time is not None: + if response_read_time is not None: result_datetime = _datetime_to_pb_timestamp(r.read_time) - assert result_datetime == read_time + assert result_datetime == response_read_time if explain_options is None: with pytest.raises(QueryExplainError, match="explain_options not set"): @@ -387,6 +425,8 @@ async def _async_aggregation_query_get_helper( } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() + if query_read_time is not None: + expected_request["read_time"] = query_read_time firestore_api.run_aggregation_query.assert_called_once_with( request=expected_request, metadata=client._rpc_metadata, @@ -405,7 +445,9 @@ async def test_async_aggregation_query_get_with_readtime(): one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1) read_time = _datetime_to_pb_timestamp(one_hour_ago) - await _async_aggregation_query_get_helper(read_time=read_time) + await _async_aggregation_query_get_helper( + query_read_time=one_hour_ago, response_read_time=read_time + ) @pytest.mark.asyncio @@ -583,7 +625,11 @@ async def _async_aggregation_query_stream_helper( kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) # Execute the query and check the response. - returned = aggregation_query.stream(**kwargs, explain_options=explain_options) + returned = aggregation_query.stream( + **kwargs, + explain_options=explain_options, + read_time=read_time, + ) assert isinstance(returned, AsyncStreamGenerator) results = [] @@ -611,6 +657,8 @@ async def _async_aggregation_query_stream_helper( } if explain_options is not None: expected_request["explain_options"] = explain_options._to_dict() + if read_time is not None: + expected_request["read_time"] = read_time # Verify the mock call. 
firestore_api.run_aggregation_query.assert_called_once_with( @@ -625,6 +673,15 @@ async def test_aggregation_query_stream(): await _async_aggregation_query_stream_helper() +@pytest.mark.asyncio +async def test_async_aggregation_query_stream_with_read_time(): + from google.cloud._helpers import _datetime_to_pb_timestamp + + one_hour_ago = datetime.now(tz=timezone.utc) - timedelta(hours=1) + read_time = _datetime_to_pb_timestamp(one_hour_ago) + await _async_aggregation_query_stream_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_aggregation_query_stream_w_explain_options_analyze_true(): from google.cloud.firestore_v1.query_profile import ExplainOptions diff --git a/tests/unit/v1/test_async_client.py b/tests/unit/v1/test_async_client.py index ee624d382b..4924856a84 100644 --- a/tests/unit/v1/test_async_client.py +++ b/tests/unit/v1/test_async_client.py @@ -187,7 +187,7 @@ def test_asyncclient_document_factory_w_nested_path(): assert isinstance(document2, AsyncDocumentReference) -async def _collections_helper(retry=None, timeout=None): +async def _collections_helper(retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_collection import AsyncCollectionReference @@ -206,7 +206,7 @@ async def __aiter__(self, **_): client._firestore_api_internal = firestore_api kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) - collections = [c async for c in client.collections(**kwargs)] + collections = [c async for c in client.collections(read_time=read_time, **kwargs)] assert len(collections) == len(collection_ids) for collection, collection_id in zip(collections, collection_ids): @@ -215,8 +215,13 @@ async def __aiter__(self, **_): assert collection.id == collection_id base_path = client._database_string + "/documents" + expected_request = { + "parent": base_path, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.list_collection_ids.assert_called_once_with( - request={"parent": base_path}, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -236,6 +241,12 @@ async def test_asyncclient_collections_w_retry_timeout(): await _collections_helper(retry=retry, timeout=timeout) +@pytest.mark.asyncio +async def test_asyncclient_collections_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _collections_helper(read_time=read_time) + + async def _invoke_get_all(client, references, document_pbs, **kwargs): # Create a minimal fake GAPIC with a dummy response. 
firestore_api = AsyncMock(spec=["batch_get_documents"]) @@ -252,7 +263,13 @@ async def _invoke_get_all(client, references, document_pbs, **kwargs): return [s async for s in snapshots] -async def _get_all_helper(num_snapshots=2, txn_id=None, retry=None, timeout=None): +async def _get_all_helper( + num_snapshots=2, + txn_id=None, + retry=None, + timeout=None, + read_time=None, +): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import DocumentSnapshot from google.cloud.firestore_v1.types import common @@ -261,13 +278,13 @@ async def _get_all_helper(num_snapshots=2, txn_id=None, retry=None, timeout=None data1 = {"a": "cheese"} document1 = client.document("pineapple", "lamp1") - document_pb1, read_time = _doc_get_info(document1._document_path, data1) - response1 = _make_batch_response(found=document_pb1, read_time=read_time) + document_pb1, doc_read_time = _doc_get_info(document1._document_path, data1) + response1 = _make_batch_response(found=document_pb1, read_time=doc_read_time) data2 = {"b": True, "c": 18} document2 = client.document("pineapple", "lamp2") - document, read_time = _doc_get_info(document2._document_path, data2) - response2 = _make_batch_response(found=document, read_time=read_time) + document, doc_read_time = _doc_get_info(document2._document_path, data2) + response2 = _make_batch_response(found=document, read_time=doc_read_time) document3 = client.document("pineapple", "lamp3") response3 = _make_batch_response(missing=document3._document_path) @@ -290,6 +307,7 @@ async def _get_all_helper(num_snapshots=2, txn_id=None, retry=None, timeout=None documents, responses, field_paths=field_paths, + read_time=read_time, **kwargs, ) @@ -308,14 +326,17 @@ async def _get_all_helper(num_snapshots=2, txn_id=None, retry=None, timeout=None mask = common.DocumentMask(field_paths=field_paths) kwargs.pop("transaction", None) + expected_request = { + "database": client._database_string, + "documents": doc_paths, + "mask": mask, + "transaction": txn_id, + } + if read_time is not None: + expected_request["read_time"] = read_time client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": mask, - "transaction": txn_id, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -346,6 +367,12 @@ async def test_asyncclient_get_all_wrong_order(): await _get_all_helper(num_snapshots=3) +@pytest.mark.asyncio +async def test_asyncclient_get_all_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _get_all_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_asyncclient_get_all_unknown_result(): from google.cloud.firestore_v1.base_client import _BAD_DOC_TEMPLATE diff --git a/tests/unit/v1/test_async_collection.py b/tests/unit/v1/test_async_collection.py index 497fc455fa..a0194ace5b 100644 --- a/tests/unit/v1/test_async_collection.py +++ b/tests/unit/v1/test_async_collection.py @@ -17,6 +17,7 @@ import mock import pytest +from datetime import datetime, timezone from tests.unit.v1._test_helpers import DEFAULT_TEST_PROJECT, make_async_client from tests.unit.v1.test__helpers import AsyncIter, AsyncMock @@ -302,7 +303,9 @@ async def _get_chunk(*args, **kwargs): @pytest.mark.asyncio -async def _list_documents_helper(page_size=None, retry=None, timeout=None): +async def _list_documents_helper( + page_size=None, retry=None, timeout=None, read_time=None +): from google.api_core.page_iterator import Page from 
google.api_core.page_iterator_async import AsyncIterator @@ -338,12 +341,13 @@ async def _next_page(self): documents = [ i async for i in collection.list_documents( - page_size=page_size, - **kwargs, + page_size=page_size, **kwargs, read_time=read_time ) ] else: - documents = [i async for i in collection.list_documents(**kwargs)] + documents = [ + i async for i in collection.list_documents(**kwargs, read_time=read_time) + ] # Verify the response and the mocks. assert len(documents) == len(document_ids) @@ -353,14 +357,17 @@ async def _next_page(self): assert document.id == document_id parent, _ = collection._parent_info() + expected_request = { + "parent": parent, + "collection_id": collection.id, + "page_size": page_size, + "show_missing": True, + "mask": {"field_paths": None}, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.list_documents.assert_called_once_with( - request={ - "parent": parent, - "collection_id": collection.id, - "page_size": page_size, - "show_missing": True, - "mask": {"field_paths": None}, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -385,6 +392,11 @@ async def test_asynccollectionreference_list_documents_w_page_size(): await _list_documents_helper(page_size=25) +@pytest.mark.asyncio +async def test_asynccollectionreference_list_documents_w_read_time(): + await _list_documents_helper(read_time=datetime.now(tz=timezone.utc)) + + @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True) @pytest.mark.asyncio async def test_asynccollectionreference_get(query_class): @@ -450,6 +462,21 @@ async def test_asynccollectionreference_get_w_explain_options(query_class): ) +@mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True) +@pytest.mark.asyncio +async def test_asynccollectionreference_get_w_read_time(query_class): + read_time = datetime.now(tz=timezone.utc) + collection = _make_async_collection_reference("collection") + await collection.get(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + query_instance.get.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + @mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True) @pytest.mark.asyncio async def test_asynccollectionreference_stream(query_class): @@ -552,6 +579,23 @@ async def response_generator(): assert explain_metrics.execution_stats.results_returned == 1 +@mock.patch("google.cloud.firestore_v1.async_query.AsyncQuery", autospec=True) +@pytest.mark.asyncio +async def test_asynccollectionreference_stream_w_read_time(query_class): + read_time = datetime.now(tz=timezone.utc) + collection = _make_async_collection_reference("collection") + get_response = collection.stream(read_time=read_time) + + query_class.assert_called_once_with(collection) + query_instance = query_class.return_value + + assert get_response is query_instance.stream.return_value + query_instance.stream.assert_called_once_with( + transaction=None, + read_time=read_time, + ) + + def test_asynccollectionreference_recursive(): from google.cloud.firestore_v1.async_query import AsyncQuery diff --git a/tests/unit/v1/test_async_document.py b/tests/unit/v1/test_async_document.py index 8d67e78f08..45472c6604 100644 --- a/tests/unit/v1/test_async_document.py +++ b/tests/unit/v1/test_async_document.py @@ -17,6 +17,9 @@ import mock import pytest +from datetime import datetime + +from google.protobuf import timestamp_pb2 from 
tests.unit.v1._test_helpers import make_async_client from tests.unit.v1.test__helpers import AsyncIter, AsyncMock @@ -399,6 +402,7 @@ async def _get_helper( return_empty=False, retry=None, timeout=None, + read_time=None, ): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.transaction import Transaction @@ -407,10 +411,14 @@ async def _get_helper( # Create a minimal fake GAPIC with a dummy response. create_time = 123 update_time = 234 - read_time = 345 + if read_time: + response_read_time = timestamp_pb2.Timestamp() + response_read_time.FromDatetime(read_time) + else: + response_read_time = 345 firestore_api = AsyncMock(spec=["batch_get_documents"]) response = mock.create_autospec(firestore.BatchGetDocumentsResponse) - response.read_time = 345 + response.read_time = response_read_time response.found = mock.create_autospec(document.Document) response.found.fields = {} response.found.create_time = create_time @@ -445,6 +453,7 @@ def WhichOneof(val): field_paths=field_paths, transaction=transaction, **kwargs, + read_time=read_time, ) assert snapshot.reference is document_reference @@ -457,7 +466,7 @@ def WhichOneof(val): else: assert snapshot.to_dict() == {} assert snapshot.exists - assert snapshot.read_time is read_time + assert snapshot.read_time is response_read_time assert snapshot.create_time is create_time assert snapshot.update_time is update_time @@ -472,13 +481,17 @@ def WhichOneof(val): else: expected_transaction_id = None + expected_request = { + "database": client._database_string, + "documents": [document_reference._document_path], + "mask": mask, + "transaction": expected_transaction_id, + } + if read_time is not None: + expected_request["read_time"] = read_time + firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": [document_reference._document_path], - "mask": mask, - "transaction": expected_transaction_id, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -530,7 +543,12 @@ async def test_asyncdocumentreference_get_with_transaction(): @pytest.mark.asyncio -async def _collections_helper(page_size=None, retry=None, timeout=None): +async def test_asyncdocumentreference_get_with_read_time(): + await _get_helper(read_time=datetime.now()) + + +@pytest.mark.asyncio +async def _collections_helper(page_size=None, retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_collection import AsyncCollectionReference @@ -553,10 +571,15 @@ async def __aiter__(self, **_): document = _make_async_document_reference("where", "we-are", client=client) if page_size is not None: collections = [ - c async for c in document.collections(page_size=page_size, **kwargs) + c + async for c in document.collections( + page_size=page_size, **kwargs, read_time=read_time + ) ] else: - collections = [c async for c in document.collections(**kwargs)] + collections = [ + c async for c in document.collections(**kwargs, read_time=read_time) + ] # Verify the response and the mocks. 
assert len(collections) == len(collection_ids) @@ -565,8 +588,15 @@ async def __aiter__(self, **_): assert collection.parent == document assert collection.id == collection_id + expected_result = { + "parent": document._document_path, + "page_size": page_size, + } + if read_time is not None: + expected_result["read_time"] = read_time + firestore_api.list_collection_ids.assert_called_once_with( - request={"parent": document._document_path, "page_size": page_size}, + request=expected_result, metadata=client._rpc_metadata, **kwargs, ) @@ -586,6 +616,11 @@ async def test_asyncdocumentreference_collections_w_retry_timeout(): await _collections_helper(retry=retry, timeout=timeout) +@pytest.mark.asyncio +async def test_documentreference_collections_w_read_time(): + await _collections_helper(read_time=datetime.now()) + + @pytest.mark.asyncio async def test_asyncdocumentreference_collections_w_page_size(): await _collections_helper(page_size=10) diff --git a/tests/unit/v1/test_async_query.py b/tests/unit/v1/test_async_query.py index efc6c7df78..54c80e5ad4 100644 --- a/tests/unit/v1/test_async_query.py +++ b/tests/unit/v1/test_async_query.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import types import mock @@ -41,7 +42,7 @@ def test_asyncquery_constructor(): assert not query._all_descendants -async def _get_helper(retry=None, timeout=None, explain_options=None): +async def _get_helper(retry=None, timeout=None, explain_options=None, read_time=None): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. @@ -68,7 +69,9 @@ async def _get_helper(retry=None, timeout=None, explain_options=None): # Execute the query and check the response. query = make_async_query(parent) - returned = await query.get(**kwargs, explain_options=explain_options) + returned = await query.get( + **kwargs, explain_options=explain_options, read_time=read_time + ) assert isinstance(returned, QueryResultsList) assert len(returned) == 1 @@ -94,6 +97,8 @@ async def _get_helper(retry=None, timeout=None, explain_options=None): } if explain_options: request["explain_options"] = explain_options._to_dict() + if read_time: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -117,6 +122,12 @@ async def test_asyncquery_get_w_retry_timeout(): await _get_helper(retry=retry, timeout=timeout) +@pytest.mark.asyncio +async def test_asyncquery_get_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _get_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_asyncquery_get_limit_to_last(): from google.cloud import firestore @@ -336,7 +347,9 @@ async def test_asyncquery_chunkify_w_chunksize_gt_limit(): assert [snapshot.id for snapshot in chunks[0]] == expected_ids -async def _stream_helper(retry=None, timeout=None, explain_options=None): +async def _stream_helper( + retry=None, timeout=None, explain_options=None, read_time=None +): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator @@ -367,7 +380,9 @@ async def _stream_helper(retry=None, timeout=None, explain_options=None): # Execute the query and check the response. 
query = make_async_query(parent) - stream_response = query.stream(**kwargs, explain_options=explain_options) + stream_response = query.stream( + **kwargs, explain_options=explain_options, read_time=read_time + ) assert isinstance(stream_response, AsyncStreamGenerator) returned = [x async for x in stream_response] @@ -395,6 +410,8 @@ async def _stream_helper(retry=None, timeout=None, explain_options=None): } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -418,6 +435,12 @@ async def test_asyncquery_stream_w_retry_timeout(): await _stream_helper(retry=retry, timeout=timeout) +@pytest.mark.asyncio +async def test_asyncquery_stream_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _stream_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_asyncquery_stream_with_limit_to_last(): # Attach the fake GAPIC to a real client. @@ -481,6 +504,57 @@ async def test_asyncquery_stream_with_transaction(): ) +@pytest.mark.asyncio +async def test_asyncquery_stream_with_transaction_and_read_time(): + from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator + + # Create a minimal fake GAPIC. + firestore_api = AsyncMock(spec=["run_query"]) + + # Attach the fake GAPIC to a real client. + client = make_async_client() + client._firestore_api_internal = firestore_api + + # Create a real-ish transaction for this client. + transaction = client.transaction() + txn_id = b"\x00\x00\x01-work-\xf2" + transaction._id = txn_id + + # Create a read_time for this client. + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + + # Make a **real** collection reference as parent. + parent = client.collection("declaration") + + # Add a dummy response to the minimal fake GAPIC. + parent_path, expected_prefix = parent._parent_info() + name = "{}/burger".format(expected_prefix) + data = {"lettuce": b"\xee\x87"} + response_pb = _make_query_response(name=name, data=data) + firestore_api.run_query.return_value = AsyncIter([response_pb]) + + # Execute the query and check the response. + query = make_async_query(parent) + get_response = query.stream(transaction=transaction, read_time=read_time) + assert isinstance(get_response, AsyncStreamGenerator) + returned = [x async for x in get_response] + assert len(returned) == 1 + snapshot = returned[0] + assert snapshot.reference._path == ("declaration", "burger") + assert snapshot.to_dict() == data + + # Verify the mock call. + firestore_api.run_query.assert_called_once_with( + request={ + "parent": parent_path, + "structured_query": query._to_protobuf(), + "transaction": txn_id, + "read_time": read_time, + }, + metadata=client._rpc_metadata, + ) + + @pytest.mark.asyncio async def test_asyncquery_stream_no_results(): from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator @@ -718,7 +792,7 @@ def test_asynccollectiongroup_constructor_all_descendents_is_false(): @pytest.mark.asyncio -async def _get_partitions_helper(retry=None, timeout=None): +async def _get_partitions_helper(retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers # Create a minimal fake GAPIC. @@ -743,7 +817,7 @@ async def _get_partitions_helper(retry=None, timeout=None): # Execute the query and check the response. 
query = _make_async_collection_group(parent) - get_response = query.get_partitions(2, **kwargs) + get_response = query.get_partitions(2, read_time=read_time, **kwargs) assert isinstance(get_response, types.AsyncGeneratorType) returned = [i async for i in get_response] @@ -755,12 +829,15 @@ async def _get_partitions_helper(retry=None, timeout=None): parent, orders=(query._make_order("__name__", query.ASCENDING),), ) + expected_request = { + "parent": parent_path, + "structured_query": partition_query._to_protobuf(), + "partition_count": 2, + } + if read_time is not None: + expected_request["read_time"] = read_time firestore_api.partition_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": partition_query._to_protobuf(), - "partition_count": 2, - }, + request=expected_request, metadata=client._rpc_metadata, **kwargs, ) @@ -780,6 +857,12 @@ async def test_asynccollectiongroup_get_partitions_w_retry_timeout(): await _get_partitions_helper(retry=retry, timeout=timeout) +@pytest.mark.asyncio +async def test_asynccollectiongroup_get_partitions_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _get_partitions_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_asynccollectiongroup_get_partitions_w_filter(): # Make a **real** collection reference as parent. diff --git a/tests/unit/v1/test_async_transaction.py b/tests/unit/v1/test_async_transaction.py index e4bb788e3d..d357e3482a 100644 --- a/tests/unit/v1/test_async_transaction.py +++ b/tests/unit/v1/test_async_transaction.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import mock import pytest @@ -294,13 +295,15 @@ async def test_asynctransaction__commit_failure(): ) -async def _get_all_helper(retry=None, timeout=None): +async def _get_all_helper(retry=None, timeout=None, read_time=None): from google.cloud.firestore_v1 import _helpers client = AsyncMock(spec=["get_all"]) transaction = _make_async_transaction(client) ref1, ref2 = mock.Mock(), mock.Mock() kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) + if read_time is not None: + kwargs["read_time"] = read_time result = await transaction.get_all([ref1, ref2], **kwargs) @@ -326,7 +329,15 @@ async def test_asynctransaction_get_all_w_retry_timeout(): await _get_all_helper(retry=retry, timeout=timeout) -async def _get_w_document_ref_helper(retry=None, timeout=None, explain_options=None): +@pytest.mark.asyncio +async def test_asynctransaction_get_all_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _get_all_helper(read_time=read_time) + + +async def _get_w_document_ref_helper( + retry=None, timeout=None, explain_options=None, read_time=None +): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_document import AsyncDocumentReference @@ -335,7 +346,12 @@ async def _get_w_document_ref_helper(retry=None, timeout=None, explain_options=N ref = AsyncDocumentReference("documents", "doc-id") kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout) - result = await transaction.get(ref, **kwargs, explain_options=explain_options) + if explain_options is not None: + kwargs["explain_options"] = explain_options + if read_time is not None: + kwargs["read_time"] = read_time + + result = await transaction.get(ref, **kwargs) client.get_all.assert_called_once_with([ref], transaction=transaction, **kwargs) assert result is client.get_all.return_value @@ -356,7 
+372,7 @@ async def test_asynctransaction_get_w_document_ref_w_retry_timeout(): @pytest.mark.asyncio -async def test_transaction_get_w_document_ref_w_explain_options(): +async def test_asynctransaction_get_w_document_ref_w_explain_options(): from google.cloud.firestore_v1.query_profile import ExplainOptions with pytest.raises(ValueError, match="`explain_options` cannot be provided."): @@ -365,7 +381,16 @@ async def test_transaction_get_w_document_ref_w_explain_options(): ) -async def _get_w_query_helper(retry=None, timeout=None, explain_options=None): +@pytest.mark.asyncio +async def test_asynctransaction_get_w_document_ref_w_read_time(): + await _get_w_document_ref_helper( + read_time=datetime.datetime.now(tz=datetime.timezone.utc) + ) + + +async def _get_w_query_helper( + retry=None, timeout=None, explain_options=None, read_time=None +): from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1.async_query import AsyncQuery from google.cloud.firestore_v1.async_stream_generator import AsyncStreamGenerator @@ -407,6 +432,7 @@ async def _get_w_query_helper(retry=None, timeout=None, explain_options=None): query, **kwargs, explain_options=explain_options, + read_time=read_time, ) # Verify the response. @@ -435,6 +461,8 @@ async def _get_w_query_helper(retry=None, timeout=None, explain_options=None): } if explain_options is not None: request["explain_options"] = explain_options._to_dict() + if read_time is not None: + request["read_time"] = read_time # Verify the mock call. firestore_api.run_query.assert_called_once_with( @@ -462,6 +490,12 @@ async def test_transaction_get_w_query_w_explain_options(): await _get_w_query_helper(explain_options=ExplainOptions(analyze=True)) +@pytest.mark.asyncio +async def test_asynctransaction_get_w_query_w_read_time(): + read_time = datetime.datetime.now(tz=datetime.timezone.utc) + await _get_w_query_helper(read_time=read_time) + + @pytest.mark.asyncio async def test_asynctransaction_get_failure(): client = _make_client() From 2bbeda27443b403347cd078ae105ef3c8b9115c4 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Tue, 3 Jun 2025 19:12:20 +0000 Subject: [PATCH 11/14] TYPE_CHECKING --- google/cloud/firestore_v1/aggregation.py | 4 ++-- google/cloud/firestore_v1/base_aggregation.py | 3 ++- google/cloud/firestore_v1/base_collection.py | 3 ++- google/cloud/firestore_v1/base_document.py | 3 ++- google/cloud/firestore_v1/base_query.py | 3 ++- google/cloud/firestore_v1/base_transaction.py | 4 ++-- google/cloud/firestore_v1/client.py | 4 ++-- google/cloud/firestore_v1/collection.py | 4 ++-- google/cloud/firestore_v1/query.py | 4 ++-- google/cloud/firestore_v1/transaction.py | 3 +-- 10 files changed, 19 insertions(+), 16 deletions(-) diff --git a/google/cloud/firestore_v1/aggregation.py b/google/cloud/firestore_v1/aggregation.py index 8d3feb696d..4070cd22b9 100644 --- a/google/cloud/firestore_v1/aggregation.py +++ b/google/cloud/firestore_v1/aggregation.py @@ -20,8 +20,6 @@ """ from __future__ import annotations -import datetime - from typing import TYPE_CHECKING, Any, Generator, List, Optional, Union from google.api_core import exceptions, gapic_v1 @@ -41,6 +39,8 @@ from google.cloud.firestore_v1.query_profile import ExplainMetrics from google.cloud.firestore_v1.query_profile import ExplainOptions + import datetime + class AggregationQuery(BaseAggregationQuery): """Represents an aggregation query to the Firestore API.""" diff --git a/google/cloud/firestore_v1/base_aggregation.py b/google/cloud/firestore_v1/base_aggregation.py index 
d99c371094..da1af1ec10 100644 --- a/google/cloud/firestore_v1/base_aggregation.py +++ b/google/cloud/firestore_v1/base_aggregation.py @@ -21,7 +21,6 @@ from __future__ import annotations import abc -import datetime from abc import ABC from typing import TYPE_CHECKING, Any, Coroutine, List, Optional, Tuple, Union @@ -45,6 +44,8 @@ StreamGenerator, ) + import datetime + class AggregationResult(object): """ diff --git a/google/cloud/firestore_v1/base_collection.py b/google/cloud/firestore_v1/base_collection.py index 5451097a8f..ada23529de 100644 --- a/google/cloud/firestore_v1/base_collection.py +++ b/google/cloud/firestore_v1/base_collection.py @@ -15,7 +15,6 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations -import datetime import random from typing import ( @@ -55,6 +54,8 @@ from google.cloud.firestore_v1.vector import Vector from google.cloud.firestore_v1.vector_query import VectorQuery + import datetime + _AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" diff --git a/google/cloud/firestore_v1/base_document.py b/google/cloud/firestore_v1/base_document.py index e5f54ef280..517db20d3f 100644 --- a/google/cloud/firestore_v1/base_document.py +++ b/google/cloud/firestore_v1/base_document.py @@ -16,7 +16,6 @@ from __future__ import annotations import copy -import datetime from typing import ( TYPE_CHECKING, @@ -39,6 +38,8 @@ if TYPE_CHECKING: # pragma: NO COVER from google.cloud.firestore_v1.types import Document, firestore, write + import datetime + class BaseDocumentReference(object): """A reference to a document in a Firestore database. diff --git a/google/cloud/firestore_v1/base_query.py b/google/cloud/firestore_v1/base_query.py index 135a2354f3..7f0ca15d2c 100644 --- a/google/cloud/firestore_v1/base_query.py +++ b/google/cloud/firestore_v1/base_query.py @@ -22,7 +22,6 @@ import abc import copy -import datetime import math import warnings @@ -68,6 +67,8 @@ from google.cloud.firestore_v1.query_results import QueryResultsList from google.cloud.firestore_v1.stream_generator import StreamGenerator + import datetime + _BAD_DIR_STRING: str _BAD_OP_NAN: str diff --git a/google/cloud/firestore_v1/base_transaction.py b/google/cloud/firestore_v1/base_transaction.py index 5e1fb74424..297c3f572e 100644 --- a/google/cloud/firestore_v1/base_transaction.py +++ b/google/cloud/firestore_v1/base_transaction.py @@ -15,8 +15,6 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations -import datetime - from typing import ( TYPE_CHECKING, Any, @@ -39,6 +37,8 @@ from google.cloud.firestore_v1.stream_generator import StreamGenerator from google.cloud.firestore_v1.types import write as write_pb + import datetime + MAX_ATTEMPTS = 5 """int: Default number of transaction attempts (with retries).""" diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index f98e329a29..e39c9056f0 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -25,8 +25,6 @@ """ from __future__ import annotations -import datetime - from typing import TYPE_CHECKING, Any, Generator, Iterable, List, Optional, Union from google.api_core import gapic_v1 @@ -55,6 +53,8 @@ if TYPE_CHECKING: from google.cloud.firestore_v1.bulk_writer import BulkWriter # pragma: NO COVER + import datetime + class Client(BaseClient): """Client for interacting with Google Cloud Firestore API. 
diff --git a/google/cloud/firestore_v1/collection.py b/google/cloud/firestore_v1/collection.py index e37f6ad0fc..60788dd71e 100644 --- a/google/cloud/firestore_v1/collection.py +++ b/google/cloud/firestore_v1/collection.py @@ -15,8 +15,6 @@ """Classes for representing collections for the Google Cloud Firestore API.""" from __future__ import annotations -import datetime - from typing import TYPE_CHECKING, Any, Callable, Generator, Optional, Tuple, Union from google.api_core import gapic_v1 @@ -37,6 +35,8 @@ from google.cloud.firestore_v1.query_profile import ExplainOptions from google.cloud.firestore_v1.stream_generator import StreamGenerator + import datetime + class CollectionReference(BaseCollectionReference[query_mod.Query]): """A reference to a collection in a Firestore database. diff --git a/google/cloud/firestore_v1/query.py b/google/cloud/firestore_v1/query.py index a4f56396b0..8b6018b6a5 100644 --- a/google/cloud/firestore_v1/query.py +++ b/google/cloud/firestore_v1/query.py @@ -20,8 +20,6 @@ """ from __future__ import annotations -import datetime - from typing import ( TYPE_CHECKING, Any, @@ -61,6 +59,8 @@ from google.cloud.firestore_v1.field_path import FieldPath from google.cloud.firestore_v1.query_profile import ExplainMetrics, ExplainOptions + import datetime + class Query(BaseQuery): """Represents a query to the Firestore API. diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index 8efafa96cc..d577fd6107 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -15,8 +15,6 @@ """Helpers for applying Google Cloud Firestore changes in a transaction.""" from __future__ import annotations -import datetime - from typing import TYPE_CHECKING, Any, Callable, Generator, Optional from google.api_core import exceptions, gapic_v1 @@ -42,6 +40,7 @@ from google.cloud.firestore_v1.query_profile import ExplainOptions from google.cloud.firestore_v1.stream_generator import StreamGenerator + import datetime class Transaction(batch.WriteBatch, BaseTransaction): """Accumulate read-and-write operations to be sent in a transaction. From c6d3c2cf20aca0ac80b688fdb0de7f63afe1cef5 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 3 Jun 2025 19:14:41 +0000 Subject: [PATCH 12/14] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20?= =?UTF-8?q?post-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- google/cloud/firestore_v1/transaction.py | 1 + 1 file changed, 1 insertion(+) diff --git a/google/cloud/firestore_v1/transaction.py b/google/cloud/firestore_v1/transaction.py index d577fd6107..913fc1d3bc 100644 --- a/google/cloud/firestore_v1/transaction.py +++ b/google/cloud/firestore_v1/transaction.py @@ -42,6 +42,7 @@ import datetime + class Transaction(batch.WriteBatch, BaseTransaction): """Accumulate read-and-write operations to be sent in a transaction. 
From e328d26ac2aba25e292b4ec5d5a4a634d903826b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 3 Jun 2025 12:22:51 -0700 Subject: [PATCH 13/14] Update client.py fix no cover comment --- google/cloud/firestore_v1/client.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/google/cloud/firestore_v1/client.py b/google/cloud/firestore_v1/client.py index e39c9056f0..ec906f991c 100644 --- a/google/cloud/firestore_v1/client.py +++ b/google/cloud/firestore_v1/client.py @@ -50,9 +50,8 @@ ) from google.cloud.firestore_v1.transaction import Transaction -if TYPE_CHECKING: - from google.cloud.firestore_v1.bulk_writer import BulkWriter # pragma: NO COVER - +if TYPE_CHECKING: # pragma: NO COVER + from google.cloud.firestore_v1.bulk_writer import BulkWriter import datetime From 9fdbbf73cb2731374da7f0da94421eb2443a3052 Mon Sep 17 00:00:00 2001 From: Kevin Zheng Date: Wed, 4 Jun 2025 15:34:01 +0000 Subject: [PATCH 14/14] fixed async system test --- tests/system/test_system_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/test_system_async.py b/tests/system/test_system_async.py index ed9984954d..945e7cb128 100644 --- a/tests/system/test_system_async.py +++ b/tests/system/test_system_async.py @@ -304,7 +304,7 @@ async def test_document_collections_w_read_time(client, cleanup, database): cleanup(document.delete) data = {"now": firestore.SERVER_TIMESTAMP} - document.create(data) + await document.create(data) original_child_ids = ["child1", "child2"] read_time = None
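
Illustrative usage (a minimal sketch, not taken from the patches above): the
series threads one optional ``read_time`` keyword through ``get``/``stream``,
``get_all``, ``collections``/``list_documents``, aggregation queries,
partitions, and transactional reads. The snippet below exercises the sync
surface only; the project ID, collection name, and document contents are
placeholders, and it assumes default credentials plus an otherwise-empty
collection.

    import datetime

    from google.cloud import firestore
    from google.cloud.firestore_v1.base_query import FieldFilter

    client = firestore.Client(project="my-project")  # placeholder project
    cities = client.collection("cities")  # placeholder collection

    cities.document("SF").set({"population": 870_000})

    # Capture a point in time after the first write. Naive datetimes are
    # treated as UTC; the timestamp must fall within the past hour (or be a
    # whole-minute timestamp within 7 days when Point-in-Time Recovery is
    # enabled).
    read_time = datetime.datetime.now(tz=datetime.timezone.utc)

    cities.document("LA").set({"population": 4_000_000})

    # A read at read_time sees only the first write; a current read sees both.
    assert len(cities.get(read_time=read_time)) == 1
    assert len(cities.get()) == 2

    # The same keyword applies to streams, aggregations, and listing helpers.
    query = cities.where(filter=FieldFilter("population", ">", 0))
    for snapshot in query.stream(read_time=read_time):
        print(snapshot.id, snapshot.to_dict())

    total_then = cities.count(alias="total").get(read_time=read_time)
    print(total_then[0][0].value)  # 1

    for ref in cities.list_documents(read_time=read_time):
        print(ref.id)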