
Commit 4b51fc1

vdusek and claude committed
test: address PR review feedback
- Parametrize tests with multiple assertions into single-assert-per-case (see the sketch below)
- Remove redundant service_locator._configuration = None (handled by autouse fixture)
- Inject mocked API clients to avoid accessing private _api_client attribute
- Move drop() calls into finally blocks for cleanup on test failure
- Always include is_running_in_ipython flag in get_system_info (True/False)
- Remove duplicate assert in test_create_same_hmac
- Remove unnecessary type: ignore comment

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 61f2088 commit 4b51fc1
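For the first item, the single-assert-per-case refactor follows the usual pytest parametrization pattern. The sketch below is illustrative only; the helper `check_value`, the test name, and the parameter values are hypothetical and not part of this commit.

```python
import pytest


# Hypothetical helper, not from this commit; it stands in for whatever
# the parametrized tests actually exercise.
def check_value(value: int) -> bool:
    return value > 0


# Instead of one test with several assertions, each expectation becomes
# its own parametrized case with a single assert.
@pytest.mark.parametrize(
    ('value', 'expected'),
    [
        (1, True),
        (0, False),
        (-5, False),
    ],
)
def test_check_value(value: int, expected: bool) -> None:
    assert check_value(value) == expected
```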


11 files changed: +184 -150 lines


src/apify/_utils.py

Lines changed: 1 addition & 2 deletions
```diff
@@ -21,8 +21,7 @@ def get_system_info() -> dict:
         'os': sys.platform,
     }
 
-    if is_running_in_ipython():
-        system_info['is_running_in_ipython'] = True
+    system_info['is_running_in_ipython'] = is_running_in_ipython()
 
     return system_info
 
```
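The practical effect of this change is that the returned dict always carries the is_running_in_ipython key, set to True or False, instead of only gaining it when running under IPython. A minimal sketch of the new behavior, assuming the internal helper stays importable from apify._utils:

```python
from apify._utils import get_system_info

info = get_system_info()

# The key is now always present; outside IPython/Jupyter it is simply False.
assert 'is_running_in_ipython' in info
assert info['is_running_in_ipython'] in (True, False)
```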

tests/integration/test_dataset.py

Lines changed: 11 additions & 10 deletions
```diff
@@ -82,16 +82,17 @@ async def test_same_references_in_named_dataset(apify_token: str, monkeypatch: p
 
     async with Actor:
         dataset_by_name_1 = await Actor.open_dataset(name=dataset_name)
-        dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
-        assert dataset_by_name_1 is dataset_by_name_2
-
-        dataset_1_metadata = await dataset_by_name_1.get_metadata()
-        dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
-        dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
-        assert dataset_by_id_1 is dataset_by_name_1
-        assert dataset_by_id_2 is dataset_by_id_1
-
-        await dataset_by_name_1.drop()
+        try:
+            dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
+            assert dataset_by_name_1 is dataset_by_name_2
+
+            dataset_1_metadata = await dataset_by_name_1.get_metadata()
+            dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
+            dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
+            assert dataset_by_id_1 is dataset_by_name_1
+            assert dataset_by_id_2 is dataset_by_id_1
+        finally:
+            await dataset_by_name_1.drop()
 
 
 async def test_force_cloud(
```
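The same try/finally shape is applied to the key-value store and request queue tests below: the named storage is dropped even when one of the identity assertions fails, so a failed run does not leave a stray storage behind. A condensed sketch of the pattern, with a placeholder dataset name:

```python
from apify import Actor


async def exercise_named_dataset() -> None:
    # Placeholder name; the real tests generate a unique name per run.
    name = 'example-dataset'

    async with Actor:
        dataset = await Actor.open_dataset(name=name)
        try:
            same_dataset = await Actor.open_dataset(name=name)
            assert dataset is same_dataset
        finally:
            # Cleanup runs whether or not the assertion above failed.
            await dataset.drop()
```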

tests/integration/test_key_value_store.py

Lines changed: 11 additions & 10 deletions
```diff
@@ -46,16 +46,17 @@ async def test_same_references_in_named_kvs(apify_token: str, monkeypatch: pytes
 
     async with Actor:
         kvs_by_name_1 = await Actor.open_key_value_store(name=kvs_name)
-        kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
-        assert kvs_by_name_1 is kvs_by_name_2
-
-        kvs_1_metadata = await kvs_by_name_1.get_metadata()
-        kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
-        kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
-        assert kvs_by_id_1 is kvs_by_name_1
-        assert kvs_by_id_2 is kvs_by_id_1
-
-        await kvs_by_name_1.drop()
+        try:
+            kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
+            assert kvs_by_name_1 is kvs_by_name_2
+
+            kvs_1_metadata = await kvs_by_name_1.get_metadata()
+            kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
+            kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
+            assert kvs_by_id_1 is kvs_by_name_1
+            assert kvs_by_id_2 is kvs_by_id_1
+        finally:
+            await kvs_by_name_1.drop()
 
 
 async def test_set_and_get_value_in_same_run(key_value_store_apify: KeyValueStore) -> None:
```

tests/integration/test_request_queue.py

Lines changed: 78 additions & 74 deletions
```diff
@@ -1004,13 +1004,16 @@ async def default_handler(context: BasicCrawlingContext) -> None:
         # Make sure all requests were handled.
         assert crawler.statistics.state.requests_finished == requests
 
-        # Check the request queue stats
-        await asyncio.sleep(10)  # Wait to be sure that metadata are updated
+        try:
+            # Check the request queue stats
+            await asyncio.sleep(10)  # Wait to be sure that metadata are updated
+
+            metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
+            Actor.log.info(f'{metadata.stats=}')
+            assert metadata.stats.write_count == requests * expected_write_count_per_request
 
-        metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
-        Actor.log.info(f'{metadata.stats=}')
-        assert metadata.stats.write_count == requests * expected_write_count_per_request
-        await rq.drop()
+        finally:
+            await rq.drop()
 
 
 async def test_cache_initialization(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1262,16 +1265,17 @@ async def test_same_references_in_named_rq(apify_token: str, monkeypatch: pytest
 
     async with Actor:
        rq_by_name_1 = await Actor.open_request_queue(name=rq_name)
-        rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
-        assert rq_by_name_1 is rq_by_name_2
-
-        rq_1_metadata = await rq_by_name_1.get_metadata()
-        rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
-        rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
-        assert rq_by_id_1 is rq_by_name_1
-        assert rq_by_id_2 is rq_by_id_1
-
-        await rq_by_name_1.drop()
+        try:
+            rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
+            assert rq_by_name_1 is rq_by_name_2
+
+            rq_1_metadata = await rq_by_name_1.get_metadata()
+            rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
+            rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
+            assert rq_by_id_1 is rq_by_name_1
+            assert rq_by_id_2 is rq_by_id_1
+        finally:
+            await rq_by_name_1.drop()
 
 
 async def test_request_queue_deduplication(
@@ -1375,63 +1379,63 @@ async def test_concurrent_processing_simulation(apify_token: str, monkeypatch: p
     )
     async with Actor:
         rq = await Actor.open_request_queue()
+        try:
+            for i in range(20):
+                await rq.add_request(f'https://example.com/concurrent/{i}')
 
-        for i in range(20):
-            await rq.add_request(f'https://example.com/concurrent/{i}')
-
-        total_count = await rq.get_total_count()
-        assert total_count == 20
+            total_count = await rq.get_total_count()
+            assert total_count == 20
 
-        async def worker() -> int:
-            processed = 0
-            request_counter = 0
+            async def worker() -> int:
+                processed = 0
+                request_counter = 0
 
-            while request := await rq.fetch_next_request():
-                await asyncio.sleep(0.01)
+                while request := await rq.fetch_next_request():
+                    await asyncio.sleep(0.01)
 
-                if request_counter % 5 == 0 and request_counter > 0:
-                    await rq.reclaim_request(request)
-                else:
-                    await rq.mark_request_as_handled(request)
-                    processed += 1
+                    if request_counter % 5 == 0 and request_counter > 0:
+                        await rq.reclaim_request(request)
+                    else:
+                        await rq.mark_request_as_handled(request)
+                        processed += 1
 
-                request_counter += 1
+                    request_counter += 1
 
-            return processed
+                return processed
 
-        workers = [worker() for _ in range(3)]
-        results = await asyncio.gather(*workers)
+            workers = [worker() for _ in range(3)]
+            results = await asyncio.gather(*workers)
 
-        total_processed = sum(results)
+            total_processed = sum(results)
 
-        assert total_processed > 0
-        assert len(results) == 3
+            assert total_processed > 0
+            assert len(results) == 3
 
-        handled_after_workers = await rq.get_handled_count()
-        assert handled_after_workers == total_processed
+            handled_after_workers = await rq.get_handled_count()
+            assert handled_after_workers == total_processed
 
-        total_after_workers = await rq.get_total_count()
-        assert total_after_workers == 20
+            total_after_workers = await rq.get_total_count()
+            assert total_after_workers == 20
 
-        remaining_count = 0
-        while not await rq.is_finished():
-            request = await rq.fetch_next_request()
-            if request:
-                remaining_count += 1
-                await rq.mark_request_as_handled(request)
-            else:
-                break
-
-        final_handled = await rq.get_handled_count()
-        final_total = await rq.get_total_count()
-        assert final_handled == 20
-        assert final_total == 20
-        assert total_processed + remaining_count == 20
+            remaining_count = 0
+            while not await rq.is_finished():
+                request = await rq.fetch_next_request()
+                if request:
+                    remaining_count += 1
+                    await rq.mark_request_as_handled(request)
+                else:
+                    break
 
-        is_finished = await rq.is_finished()
-        assert is_finished is True
+            final_handled = await rq.get_handled_count()
+            final_total = await rq.get_total_count()
+            assert final_handled == 20
+            assert final_total == 20
+            assert total_processed + remaining_count == 20
 
-        await rq.drop()
+            is_finished = await rq.is_finished()
+            assert is_finished is True
+        finally:
+            await rq.drop()
 
 
 async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1444,22 +1448,22 @@ async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -
     async with Actor:
         rq1 = await Actor.open_request_queue(name=rq_name_1)
         rq2 = await Actor.open_request_queue(name=rq_name_2)
+        try:
+            assert rq1 is not rq2
 
-        assert rq1 is not rq2
-
-        await rq1.add_request('https://example.com/queue1-request')
-        await rq2.add_request('https://example.com/queue2-request')
-
-        req1 = await rq1.fetch_next_request()
-        req2 = await rq2.fetch_next_request()
+            await rq1.add_request('https://example.com/queue1-request')
+            await rq2.add_request('https://example.com/queue2-request')
 
-        assert req1 is not None
-        assert 'queue1' in req1.url
-        assert req2 is not None
-        assert 'queue2' in req2.url
+            req1 = await rq1.fetch_next_request()
+            req2 = await rq2.fetch_next_request()
 
-        await rq1.mark_request_as_handled(req1)
-        await rq2.mark_request_as_handled(req2)
+            assert req1 is not None
+            assert 'queue1' in req1.url
+            assert req2 is not None
+            assert 'queue2' in req2.url
 
-        await rq1.drop()
-        await rq2.drop()
+            await rq1.mark_request_as_handled(req1)
+            await rq2.mark_request_as_handled(req2)
+        finally:
+            await rq1.drop()
+            await rq2.drop()
```

tests/unit/actor/test_actor_helpers.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -296,7 +296,7 @@ async def test_remote_method_with_invalid_timeout(
     async with Actor:
         actor_method = getattr(Actor, actor_method_name)
         with pytest.raises(ValueError, match='Invalid timeout'):
-            await actor_method(entity_id, timeout='invalid')  # type: ignore[arg-type]
+            await actor_method(entity_id, timeout='invalid')
 
 
 async def test_abort_with_status_message(
```

tests/unit/storage_clients/test_apify_dataset_client.py

Lines changed: 12 additions & 6 deletions
```diff
@@ -8,24 +8,30 @@
 from apify.storage_clients._apify._dataset_client import ApifyDatasetClient
 
 
-def _make_dataset_client() -> ApifyDatasetClient:
+def _make_dataset_client(api_client: AsyncMock | None = None) -> tuple[ApifyDatasetClient, AsyncMock]:
     """Create an ApifyDatasetClient with a mocked API client."""
-    api_client = AsyncMock()
-    return ApifyDatasetClient(api_client=api_client, api_public_base_url='', lock=asyncio.Lock())
+    if api_client is None:
+        api_client = AsyncMock()
+
+    return ApifyDatasetClient(
+        api_client=api_client,
+        api_public_base_url='',
+        lock=asyncio.Lock(),
+    ), api_client
 
 
 async def test_purge_raises_not_implemented() -> None:
     """Test that purge() raises NotImplementedError."""
-    client = _make_dataset_client()
+    client, _ = _make_dataset_client()
     with pytest.raises(NotImplementedError, match='Purging datasets is not supported'):
         await client.purge()
 
 
 async def test_drop_calls_api_delete() -> None:
     """Test that drop() delegates to the API client."""
-    client = _make_dataset_client()
+    client, api_client = _make_dataset_client()
     await client.drop()
-    client._api_client.delete.assert_awaited_once()  # ty: ignore[possibly-missing-attribute]
+    api_client.delete.assert_awaited_once()
 
 
 async def test_deprecated_api_public_base_url() -> None:
```
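With the reworked factory, a test can either take the auto-created mock from the returned tuple or pass in its own pre-configured AsyncMock, so nothing has to reach into the private _api_client attribute. A usage sketch, assuming it lives in the same test module as _make_dataset_client (the test name below is hypothetical):

```python
from unittest.mock import AsyncMock


async def test_drop_with_injected_mock() -> None:
    # Configure the mock up front, then hand it to the factory.
    api_client = AsyncMock()
    client, returned_mock = _make_dataset_client(api_client=api_client)

    # The factory hands back the same mock it was given.
    assert returned_mock is api_client

    await client.drop()
    api_client.delete.assert_awaited_once()
```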
