@@ -217,6 +217,40 @@ def test_collection_stream_or_get_w_explain_options_analyze_true(
     assert len(execution_stats.debug_stats) > 0


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_collections_w_read_time(client, cleanup, database):
+    first_collection_id = "doc-create" + UNIQUE_RESOURCE_ID
+    first_document_id = "doc" + UNIQUE_RESOURCE_ID
+    first_document = client.document(first_collection_id, first_document_id)
+    # Add to clean-up before API request (in case ``create()`` fails).
+    cleanup(first_document.delete)
+
+    data = {"status": "new"}
+    write_result = first_document.create(data)
+    read_time = write_result.update_time
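+    # The second collection has not been created yet; record the collection count as of now.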
+    num_collections = len(list(client.collections()))
+
+    second_collection_id = "doc-create" + UNIQUE_RESOURCE_ID + "-2"
+    second_document_id = "doc" + UNIQUE_RESOURCE_ID + "-2"
+    second_document = client.document(second_collection_id, second_document_id)
+    cleanup(second_document.delete)
+    second_document.create(data)
+
+    # We're just testing that we added one collection at read_time, not two.
+    collections = list(client.collections(read_time=read_time))
+    assert len(collections) == num_collections
+    ids = [collection.id for collection in collections]
+    assert second_collection_id not in ids
+    assert first_collection_id in ids
+
+    # Test that listing current collections does have the second id.
+    curr_collections = list(client.collections())
+    assert len(curr_collections) == num_collections + 1
+    ids = [collection.id for collection in curr_collections]
+    assert second_collection_id in ids
+    assert first_collection_id in ids
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_create_document(client, cleanup, database):
     now = datetime.datetime.now(tz=datetime.timezone.utc)
@@ -708,6 +742,42 @@ def assert_timestamp_less(timestamp_pb1, timestamp_pb2):
     assert timestamp_pb1 < timestamp_pb2


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_document_collections_w_read_time(client, cleanup, database):
+    collection_id = "doc-create-sub" + UNIQUE_RESOURCE_ID
+    document_id = "doc" + UNIQUE_RESOURCE_ID
+    document = client.document(collection_id, document_id)
+    # Add to clean-up before API request (in case ``create()`` fails).
+    cleanup(document.delete)
+
+    data = {"now": firestore.SERVER_TIMESTAMP}
+    document.create(data)
+
+    original_child_ids = ["child1", "child2"]
+    read_time = None
+
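+    # Track the latest update_time across the original children; that becomes the read_time.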
+    for child_id in original_child_ids:
+        subcollection = document.collection(child_id)
+        update_time, subdoc = subcollection.add({"foo": "bar"})
+        read_time = (
+            update_time if read_time is None or update_time > read_time else read_time
+        )
+        cleanup(subdoc.delete)
+
+    update_time, newdoc = document.collection("child3").add({"foo": "bar"})
+    cleanup(newdoc.delete)
+    assert update_time > read_time
+
+    # Compare the query at read_time to the query at new update time.
+    original_children = document.collections(read_time=read_time)
+    assert sorted(child.id for child in original_children) == sorted(original_child_ids)
+
+    original_children = document.collections()
+    assert sorted(child.id for child in original_children) == sorted(
+        original_child_ids + ["child3"]
+    )
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_no_document(client, database):
     document_id = "no_document" + UNIQUE_RESOURCE_ID
@@ -1072,6 +1142,31 @@ def test_collection_add(client, cleanup, database):
     assert set(collection3.list_documents()) == {document_ref5}


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_list_collections_with_read_time(client, cleanup, database):
+    # TODO(microgen): list_documents is returning a generator, not a list.
+    # Consider if this is desired. Also, Document isn't hashable.
+    collection_id = "coll-add" + UNIQUE_RESOURCE_ID
+    collection = client.collection(collection_id)
+
+    assert set(collection.list_documents()) == set()
+
+    data1 = {"foo": "bar"}
+    update_time1, document_ref1 = collection.add(data1)
+    cleanup(document_ref1.delete)
+    assert set(collection.list_documents()) == {document_ref1}
+
+    data2 = {"bar": "baz"}
+    update_time2, document_ref2 = collection.add(data2)
+    cleanup(document_ref2.delete)
+    assert set(collection.list_documents()) == {document_ref1, document_ref2}
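+    # At update_time1 only document_ref1 existed; by update_time2 both documents existed.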
+    assert set(collection.list_documents(read_time=update_time1)) == {document_ref1}
+    assert set(collection.list_documents(read_time=update_time2)) == {
+        document_ref1,
+        document_ref2,
+    }
+
+
 @pytest.fixture
 def query_docs(client, database):
     collection_id = "qs" + UNIQUE_RESOURCE_ID
@@ -1450,6 +1545,44 @@ def test_query_stream_or_get_w_explain_options_analyze_false(
         explain_metrics.execution_stats


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_stream_w_read_time(query_docs, cleanup, database):
+    collection, stored, allowed_vals = query_docs
+    num_vals = len(allowed_vals)
+
+    # Find the most recent read_time among the documents in the collection.
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    new_data = {
+        "a": 9000,
+        "b": 1,
+        "c": [10000, 1000],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+    _, new_ref = collection.add(new_data)
+    # Add to clean-up.
+    cleanup(new_ref.delete)
+    stored[new_ref.id] = new_data
+
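+    # The new document exists only after read_time, so the snapshot query below should not return it.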
+    # Compare query at read_time to query at current time.
+    query = collection.where(filter=FieldFilter("b", "==", 1))
+    values = {
+        snapshot.id: snapshot.to_dict()
+        for snapshot in query.stream(read_time=read_time)
+    }
+    assert len(values) == num_vals
+    assert new_ref.id not in values
+    for key, value in values.items():
+        assert stored[key] == value
+        assert value["b"] == 1
+        assert value["a"] != 9000
+        assert key != new_ref.id
+
+    new_values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()}
+    assert len(new_values) == num_vals + 1
+    assert new_ref.id in new_values
+    assert new_values[new_ref.id] == new_data
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_order_dot_key(client, cleanup, database):
     db = client
@@ -1760,13 +1893,16 @@ def test_get_all(client, cleanup, database):
     document3 = client.document(collection_name, "c")
     # Add to clean-up before API requests (in case ``create()`` fails).
     cleanup(document1.delete)
+    cleanup(document2.delete)
     cleanup(document3.delete)

     data1 = {"a": {"b": 2, "c": 3}, "d": 4, "e": 0}
     write_result1 = document1.create(data1)
     data3 = {"a": {"b": 5, "c": 6}, "d": 7, "e": 100}
     write_result3 = document3.create(data3)

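+    # ``document2`` has not been created yet; ``read_time`` captures this point-in-time state.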
+    read_time = write_result3.update_time
+
     # 0. Get 3 unique documents, one of which is missing.
     snapshots = list(client.get_all([document1, document2, document3]))

@@ -1802,6 +1938,27 @@ def test_get_all(client, cleanup, database):
     restricted3 = {"a": {"b": data3["a"]["b"]}, "d": data3["d"]}
     check_snapshot(snapshot3, document3, restricted3, write_result3)

+    # 3. Use ``read_time`` in ``get_all``
+    new_data = {"a": {"b": 8, "c": 9}, "d": 10, "e": 1010}
+    document1.update(new_data)
+    document2.create(new_data)
+    document3.update(new_data)
+
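+    # These writes happened after ``read_time``; the snapshot read below should not see them.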
+    snapshots = list(
+        client.get_all([document1, document2, document3], read_time=read_time)
+    )
+    assert snapshots[0].exists
+    assert snapshots[1].exists
+    assert not snapshots[2].exists
+
+    snapshots = [snapshot for snapshot in snapshots if snapshot.exists]
+    id_attr = operator.attrgetter("id")
+    snapshots.sort(key=id_attr)
+
+    snapshot1, snapshot3 = snapshots
+    check_snapshot(snapshot1, document1, data1, write_result1)
+    check_snapshot(snapshot3, document3, data3, write_result3)
+
18051962
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_batch(client, cleanup, database):
@@ -3015,6 +3172,48 @@ def test_query_with_or_composite_filter(collection, database):
     assert lt_10 > 0


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+@pytest.mark.parametrize(
+    "aggregation_type,expected_value", [("count", 5), ("sum", 100), ("avg", 4.0)]
+)
+def test_aggregation_queries_with_read_time(
+    collection, query, cleanup, database, aggregation_type, expected_value
+):
3182+ """
3183+ Ensure that all aggregation queries work when read_time is passed into
3184+ a query.<aggregation_type>.().get() method
3185+ """
3186+ # Find the most recent read_time in collections
+    read_time = max(docref.get().read_time for docref in collection.list_documents())
+    document_data = {
+        "a": 1,
+        "b": 9000,
+        "c": [1, 123123123],
+        "stats": {"sum": 9001, "product": 9000},
+    }
+
+    _, doc_ref = collection.add(document_data)
+    cleanup(doc_ref.delete)
+
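+    # "count" uses the filtered ``query`` fixture; "sum"/"avg" aggregate "stats.product" over the whole collection.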
+    if aggregation_type == "count":
+        aggregation_query = query.count()
+    elif aggregation_type == "sum":
+        aggregation_query = collection.sum("stats.product")
+    elif aggregation_type == "avg":
+        aggregation_query = collection.avg("stats.product")
+
+    # Check that adding the new document data affected the results of the aggregation queries.
+    new_result = aggregation_query.get()
+    assert len(new_result) == 1
+    for r in new_result[0]:
+        assert r.value != expected_value
+
+    old_result = aggregation_query.get(read_time=read_time)
+    assert len(old_result) == 1
+    for r in old_result[0]:
+        assert r.value == expected_value
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_query_with_complex_composite_filter(collection, database):
     field_filter = FieldFilter("b", "==", 0)
@@ -3228,6 +3427,52 @@ def in_transaction(transaction):
         assert inner_fn_ran is True


+@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
+def test_query_in_transaction_with_read_time(client, cleanup, database):
+    """
+    Test queries with ``read_time`` inside transactions.
+    """
+    collection_id = "doc-create" + UNIQUE_RESOURCE_ID
+    doc_ids = [f"doc{i}" + UNIQUE_RESOURCE_ID for i in range(5)]
+    doc_refs = [client.document(collection_id, doc_id) for doc_id in doc_ids]
+    for doc_ref in doc_refs:
+        cleanup(doc_ref.delete)
+    doc_refs[0].create({"a": 1, "b": 2})
+    doc_refs[1].create({"a": 1, "b": 1})
+
+    read_time = max(docref.get().read_time for docref in doc_refs)
+    doc_refs[2].create({"a": 1, "b": 3})
+
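+    # The third document was created after ``read_time``, so the read_time query should only see two matches.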
+    collection = client.collection(collection_id)
+    query = collection.where(filter=FieldFilter("a", "==", 1))
+
+    with client.transaction() as transaction:
+        # should work when transaction is initiated through transactional decorator
+        @firestore.transactional
+        def in_transaction(transaction):
+            global inner_fn_ran
+
+            new_b_values = [
+                docs.get("b") for docs in transaction.get(query, read_time=read_time)
+            ]
+            assert len(new_b_values) == 2
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 not in new_b_values
+
+            new_b_values = [docs.get("b") for docs in transaction.get(query)]
+            assert len(new_b_values) == 3
+            assert 1 in new_b_values
+            assert 2 in new_b_values
+            assert 3 in new_b_values
+
+            inner_fn_ran = True
+
+        in_transaction(transaction)
+        # make sure we didn't skip assertions in inner function
+        assert inner_fn_ran is True
+
+
 @pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
 def test_update_w_uuid(client, cleanup, database):
     """