@@ -1003,13 +1003,16 @@ async def default_handler(context: BasicCrawlingContext) -> None:
         # Make sure all requests were handled.
         assert crawler.statistics.state.requests_finished == requests
 
-        # Check the request queue stats
-        await asyncio.sleep(10)  # Wait to be sure that metadata are updated
+        try:
+            # Check the request queue stats
+            await asyncio.sleep(10)  # Wait to be sure that metadata are updated
+
+            metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
+            Actor.log.info(f'{metadata.stats=}')
+            assert metadata.stats.write_count == requests * expected_write_count_per_request
 
-        metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
-        Actor.log.info(f'{metadata.stats=}')
-        assert metadata.stats.write_count == requests * expected_write_count_per_request
-        await rq.drop()
+        finally:
+            await rq.drop()
 
 
 async def test_cache_initialization(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1253,16 +1256,17 @@ async def test_same_references_in_named_rq(apify_token: str, monkeypatch: pytest
 
     async with Actor:
         rq_by_name_1 = await Actor.open_request_queue(name=rq_name)
-        rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
-        assert rq_by_name_1 is rq_by_name_2
-
-        rq_1_metadata = await rq_by_name_1.get_metadata()
-        rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
-        rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
-        assert rq_by_id_1 is rq_by_name_1
-        assert rq_by_id_2 is rq_by_id_1
-
-        await rq_by_name_1.drop()
+        try:
+            rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
+            assert rq_by_name_1 is rq_by_name_2
+
+            rq_1_metadata = await rq_by_name_1.get_metadata()
+            rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
+            rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
+            assert rq_by_id_1 is rq_by_name_1
+            assert rq_by_id_2 is rq_by_id_1
+        finally:
+            await rq_by_name_1.drop()
 
 
 async def test_request_queue_deduplication(
@@ -1366,63 +1370,63 @@ async def test_concurrent_processing_simulation(apify_token: str, monkeypatch: p
     )
     async with Actor:
         rq = await Actor.open_request_queue()
+        try:
+            for i in range(20):
+                await rq.add_request(f'https://example.com/concurrent/{i}')
 
-        for i in range(20):
-            await rq.add_request(f'https://example.com/concurrent/{i}')
-
-        total_count = await rq.get_total_count()
-        assert total_count == 20
+            total_count = await rq.get_total_count()
+            assert total_count == 20
 
-        async def worker() -> int:
-            processed = 0
-            request_counter = 0
+            async def worker() -> int:
+                processed = 0
+                request_counter = 0
 
-            while request := await rq.fetch_next_request():
-                await asyncio.sleep(0.01)
+                while request := await rq.fetch_next_request():
+                    await asyncio.sleep(0.01)
 
-                if request_counter % 5 == 0 and request_counter > 0:
-                    await rq.reclaim_request(request)
-                else:
-                    await rq.mark_request_as_handled(request)
-                    processed += 1
+                    if request_counter % 5 == 0 and request_counter > 0:
+                        await rq.reclaim_request(request)
+                    else:
+                        await rq.mark_request_as_handled(request)
+                        processed += 1
 
-                request_counter += 1
+                    request_counter += 1
 
-            return processed
+                return processed
 
-        workers = [worker() for _ in range(3)]
-        results = await asyncio.gather(*workers)
+            workers = [worker() for _ in range(3)]
+            results = await asyncio.gather(*workers)
 
-        total_processed = sum(results)
+            total_processed = sum(results)
 
-        assert total_processed > 0
-        assert len(results) == 3
+            assert total_processed > 0
+            assert len(results) == 3
 
-        handled_after_workers = await rq.get_handled_count()
-        assert handled_after_workers == total_processed
+            handled_after_workers = await rq.get_handled_count()
+            assert handled_after_workers == total_processed
 
-        total_after_workers = await rq.get_total_count()
-        assert total_after_workers == 20
+            total_after_workers = await rq.get_total_count()
+            assert total_after_workers == 20
 
-        remaining_count = 0
-        while not await rq.is_finished():
-            request = await rq.fetch_next_request()
-            if request:
-                remaining_count += 1
-                await rq.mark_request_as_handled(request)
-            else:
-                break
-
-        final_handled = await rq.get_handled_count()
-        final_total = await rq.get_total_count()
-        assert final_handled == 20
-        assert final_total == 20
-        assert total_processed + remaining_count == 20
+            remaining_count = 0
+            while not await rq.is_finished():
+                request = await rq.fetch_next_request()
+                if request:
+                    remaining_count += 1
+                    await rq.mark_request_as_handled(request)
+                else:
+                    break
 
-        is_finished = await rq.is_finished()
-        assert is_finished is True
+            final_handled = await rq.get_handled_count()
+            final_total = await rq.get_total_count()
+            assert final_handled == 20
+            assert final_total == 20
+            assert total_processed + remaining_count == 20
 
-        await rq.drop()
+            is_finished = await rq.is_finished()
+            assert is_finished is True
+        finally:
+            await rq.drop()
 
 
 async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1435,22 +1439,22 @@ async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -
     async with Actor:
         rq1 = await Actor.open_request_queue(name=rq_name_1)
         rq2 = await Actor.open_request_queue(name=rq_name_2)
+        try:
+            assert rq1 is not rq2
 
-        assert rq1 is not rq2
-
-        await rq1.add_request('https://example.com/queue1-request')
-        await rq2.add_request('https://example.com/queue2-request')
-
-        req1 = await rq1.fetch_next_request()
-        req2 = await rq2.fetch_next_request()
+            await rq1.add_request('https://example.com/queue1-request')
+            await rq2.add_request('https://example.com/queue2-request')
 
-        assert req1 is not None
-        assert 'queue1' in req1.url
-        assert req2 is not None
-        assert 'queue2' in req2.url
+            req1 = await rq1.fetch_next_request()
+            req2 = await rq2.fetch_next_request()
 
-        await rq1.mark_request_as_handled(req1)
-        await rq2.mark_request_as_handled(req2)
+            assert req1 is not None
+            assert 'queue1' in req1.url
+            assert req2 is not None
+            assert 'queue2' in req2.url
 
-        await rq1.drop()
-        await rq2.drop()
+            await rq1.mark_request_as_handled(req1)
+            await rq2.mark_request_as_handled(req2)
+        finally:
+            await rq1.drop()
+            await rq2.drop()