Skip to content

Commit 6f93094

Browse files
vdusek and claude committed
test: address PR review feedback
- Parametrize tests with multiple assertions into single-assert-per-case
- Remove redundant service_locator._configuration = None (handled by autouse fixture)
- Inject mocked API clients to avoid accessing private _api_client attribute
- Move drop() calls into finally blocks for cleanup on test failure
- Always include is_running_in_ipython flag in get_system_info (True/False)
- Remove duplicate assert in test_create_same_hmac
- Remove unnecessary type: ignore comment

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 1aadf03 commit 6f93094

File tree

11 files changed

+184
-150
lines changed

11 files changed

+184
-150
lines changed

src/apify/_utils.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,7 @@ def get_system_info() -> dict:
2121
'os': sys.platform,
2222
}
2323

24-
if is_running_in_ipython():
25-
system_info['is_running_in_ipython'] = True
24+
system_info['is_running_in_ipython'] = is_running_in_ipython()
2625

2726
return system_info
2827

tests/integration/test_dataset.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -82,16 +82,17 @@ async def test_same_references_in_named_dataset(apify_token: str, monkeypatch: p
8282

8383
async with Actor:
8484
dataset_by_name_1 = await Actor.open_dataset(name=dataset_name)
85-
dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
86-
assert dataset_by_name_1 is dataset_by_name_2
87-
88-
dataset_1_metadata = await dataset_by_name_1.get_metadata()
89-
dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
90-
dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
91-
assert dataset_by_id_1 is dataset_by_name_1
92-
assert dataset_by_id_2 is dataset_by_id_1
93-
94-
await dataset_by_name_1.drop()
85+
try:
86+
dataset_by_name_2 = await Actor.open_dataset(name=dataset_name)
87+
assert dataset_by_name_1 is dataset_by_name_2
88+
89+
dataset_1_metadata = await dataset_by_name_1.get_metadata()
90+
dataset_by_id_1 = await Actor.open_dataset(id=dataset_1_metadata.id)
91+
dataset_by_id_2 = await Actor.open_dataset(id=dataset_1_metadata.id)
92+
assert dataset_by_id_1 is dataset_by_name_1
93+
assert dataset_by_id_2 is dataset_by_id_1
94+
finally:
95+
await dataset_by_name_1.drop()
9596

9697

9798
async def test_force_cloud(

tests/integration/test_key_value_store.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -46,16 +46,17 @@ async def test_same_references_in_named_kvs(apify_token: str, monkeypatch: pytes
4646

4747
async with Actor:
4848
kvs_by_name_1 = await Actor.open_key_value_store(name=kvs_name)
49-
kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
50-
assert kvs_by_name_1 is kvs_by_name_2
51-
52-
kvs_1_metadata = await kvs_by_name_1.get_metadata()
53-
kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
54-
kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
55-
assert kvs_by_id_1 is kvs_by_name_1
56-
assert kvs_by_id_2 is kvs_by_id_1
57-
58-
await kvs_by_name_1.drop()
49+
try:
50+
kvs_by_name_2 = await Actor.open_key_value_store(name=kvs_name)
51+
assert kvs_by_name_1 is kvs_by_name_2
52+
53+
kvs_1_metadata = await kvs_by_name_1.get_metadata()
54+
kvs_by_id_1 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
55+
kvs_by_id_2 = await Actor.open_key_value_store(id=kvs_1_metadata.id)
56+
assert kvs_by_id_1 is kvs_by_name_1
57+
assert kvs_by_id_2 is kvs_by_id_1
58+
finally:
59+
await kvs_by_name_1.drop()
5960

6061

6162
async def test_set_and_get_value_in_same_run(key_value_store_apify: KeyValueStore) -> None:

tests/integration/test_request_queue.py

Lines changed: 78 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -1003,13 +1003,16 @@ async def default_handler(context: BasicCrawlingContext) -> None:
10031003
# Make sure all requests were handled.
10041004
assert crawler.statistics.state.requests_finished == requests
10051005

1006-
# Check the request queue stats
1007-
await asyncio.sleep(10) # Wait to be sure that metadata are updated
1006+
try:
1007+
# Check the request queue stats
1008+
await asyncio.sleep(10) # Wait to be sure that metadata are updated
1009+
1010+
metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
1011+
Actor.log.info(f'{metadata.stats=}')
1012+
assert metadata.stats.write_count == requests * expected_write_count_per_request
10081013

1009-
metadata = cast('ApifyRequestQueueMetadata', await rq.get_metadata())
1010-
Actor.log.info(f'{metadata.stats=}')
1011-
assert metadata.stats.write_count == requests * expected_write_count_per_request
1012-
await rq.drop()
1014+
finally:
1015+
await rq.drop()
10131016

10141017

10151018
async def test_cache_initialization(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1253,16 +1256,17 @@ async def test_same_references_in_named_rq(apify_token: str, monkeypatch: pytest
12531256

12541257
async with Actor:
12551258
rq_by_name_1 = await Actor.open_request_queue(name=rq_name)
1256-
rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
1257-
assert rq_by_name_1 is rq_by_name_2
1258-
1259-
rq_1_metadata = await rq_by_name_1.get_metadata()
1260-
rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
1261-
rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
1262-
assert rq_by_id_1 is rq_by_name_1
1263-
assert rq_by_id_2 is rq_by_id_1
1264-
1265-
await rq_by_name_1.drop()
1259+
try:
1260+
rq_by_name_2 = await Actor.open_request_queue(name=rq_name)
1261+
assert rq_by_name_1 is rq_by_name_2
1262+
1263+
rq_1_metadata = await rq_by_name_1.get_metadata()
1264+
rq_by_id_1 = await Actor.open_request_queue(id=rq_1_metadata.id)
1265+
rq_by_id_2 = await Actor.open_request_queue(id=rq_1_metadata.id)
1266+
assert rq_by_id_1 is rq_by_name_1
1267+
assert rq_by_id_2 is rq_by_id_1
1268+
finally:
1269+
await rq_by_name_1.drop()
12661270

12671271

12681272
async def test_request_queue_deduplication(
@@ -1366,63 +1370,63 @@ async def test_concurrent_processing_simulation(apify_token: str, monkeypatch: p
13661370
)
13671371
async with Actor:
13681372
rq = await Actor.open_request_queue()
1373+
try:
1374+
for i in range(20):
1375+
await rq.add_request(f'https://example.com/concurrent/{i}')
13691376

1370-
for i in range(20):
1371-
await rq.add_request(f'https://example.com/concurrent/{i}')
1372-
1373-
total_count = await rq.get_total_count()
1374-
assert total_count == 20
1377+
total_count = await rq.get_total_count()
1378+
assert total_count == 20
13751379

1376-
async def worker() -> int:
1377-
processed = 0
1378-
request_counter = 0
1380+
async def worker() -> int:
1381+
processed = 0
1382+
request_counter = 0
13791383

1380-
while request := await rq.fetch_next_request():
1381-
await asyncio.sleep(0.01)
1384+
while request := await rq.fetch_next_request():
1385+
await asyncio.sleep(0.01)
13821386

1383-
if request_counter % 5 == 0 and request_counter > 0:
1384-
await rq.reclaim_request(request)
1385-
else:
1386-
await rq.mark_request_as_handled(request)
1387-
processed += 1
1387+
if request_counter % 5 == 0 and request_counter > 0:
1388+
await rq.reclaim_request(request)
1389+
else:
1390+
await rq.mark_request_as_handled(request)
1391+
processed += 1
13881392

1389-
request_counter += 1
1393+
request_counter += 1
13901394

1391-
return processed
1395+
return processed
13921396

1393-
workers = [worker() for _ in range(3)]
1394-
results = await asyncio.gather(*workers)
1397+
workers = [worker() for _ in range(3)]
1398+
results = await asyncio.gather(*workers)
13951399

1396-
total_processed = sum(results)
1400+
total_processed = sum(results)
13971401

1398-
assert total_processed > 0
1399-
assert len(results) == 3
1402+
assert total_processed > 0
1403+
assert len(results) == 3
14001404

1401-
handled_after_workers = await rq.get_handled_count()
1402-
assert handled_after_workers == total_processed
1405+
handled_after_workers = await rq.get_handled_count()
1406+
assert handled_after_workers == total_processed
14031407

1404-
total_after_workers = await rq.get_total_count()
1405-
assert total_after_workers == 20
1408+
total_after_workers = await rq.get_total_count()
1409+
assert total_after_workers == 20
14061410

1407-
remaining_count = 0
1408-
while not await rq.is_finished():
1409-
request = await rq.fetch_next_request()
1410-
if request:
1411-
remaining_count += 1
1412-
await rq.mark_request_as_handled(request)
1413-
else:
1414-
break
1415-
1416-
final_handled = await rq.get_handled_count()
1417-
final_total = await rq.get_total_count()
1418-
assert final_handled == 20
1419-
assert final_total == 20
1420-
assert total_processed + remaining_count == 20
1411+
remaining_count = 0
1412+
while not await rq.is_finished():
1413+
request = await rq.fetch_next_request()
1414+
if request:
1415+
remaining_count += 1
1416+
await rq.mark_request_as_handled(request)
1417+
else:
1418+
break
14211419

1422-
is_finished = await rq.is_finished()
1423-
assert is_finished is True
1420+
final_handled = await rq.get_handled_count()
1421+
final_total = await rq.get_total_count()
1422+
assert final_handled == 20
1423+
assert final_total == 20
1424+
assert total_processed + remaining_count == 20
14241425

1425-
await rq.drop()
1426+
is_finished = await rq.is_finished()
1427+
assert is_finished is True
1428+
finally:
1429+
await rq.drop()
14261430

14271431

14281432
async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -1435,22 +1439,22 @@ async def test_rq_isolation(apify_token: str, monkeypatch: pytest.MonkeyPatch) -
14351439
async with Actor:
14361440
rq1 = await Actor.open_request_queue(name=rq_name_1)
14371441
rq2 = await Actor.open_request_queue(name=rq_name_2)
1442+
try:
1443+
assert rq1 is not rq2
14381444

1439-
assert rq1 is not rq2
1440-
1441-
await rq1.add_request('https://example.com/queue1-request')
1442-
await rq2.add_request('https://example.com/queue2-request')
1443-
1444-
req1 = await rq1.fetch_next_request()
1445-
req2 = await rq2.fetch_next_request()
1445+
await rq1.add_request('https://example.com/queue1-request')
1446+
await rq2.add_request('https://example.com/queue2-request')
14461447

1447-
assert req1 is not None
1448-
assert 'queue1' in req1.url
1449-
assert req2 is not None
1450-
assert 'queue2' in req2.url
1448+
req1 = await rq1.fetch_next_request()
1449+
req2 = await rq2.fetch_next_request()
14511450

1452-
await rq1.mark_request_as_handled(req1)
1453-
await rq2.mark_request_as_handled(req2)
1451+
assert req1 is not None
1452+
assert 'queue1' in req1.url
1453+
assert req2 is not None
1454+
assert 'queue2' in req2.url
14541455

1455-
await rq1.drop()
1456-
await rq2.drop()
1456+
await rq1.mark_request_as_handled(req1)
1457+
await rq2.mark_request_as_handled(req2)
1458+
finally:
1459+
await rq1.drop()
1460+
await rq2.drop()

tests/unit/actor/test_actor_helpers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,7 @@ async def test_remote_method_with_invalid_timeout(
296296
async with Actor:
297297
actor_method = getattr(Actor, actor_method_name)
298298
with pytest.raises(ValueError, match='Invalid timeout'):
299-
await actor_method(entity_id, timeout='invalid') # type: ignore[arg-type]
299+
await actor_method(entity_id, timeout='invalid')
300300

301301

302302
async def test_abort_with_status_message(

tests/unit/storage_clients/test_apify_dataset_client.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -8,24 +8,30 @@
88
from apify.storage_clients._apify._dataset_client import ApifyDatasetClient
99

1010

11-
def _make_dataset_client() -> ApifyDatasetClient:
11+
def _make_dataset_client(api_client: AsyncMock | None = None) -> tuple[ApifyDatasetClient, AsyncMock]:
1212
"""Create an ApifyDatasetClient with a mocked API client."""
13-
api_client = AsyncMock()
14-
return ApifyDatasetClient(api_client=api_client, api_public_base_url='', lock=asyncio.Lock())
13+
if api_client is None:
14+
api_client = AsyncMock()
15+
16+
return ApifyDatasetClient(
17+
api_client=api_client,
18+
api_public_base_url='',
19+
lock=asyncio.Lock(),
20+
), api_client
1521

1622

1723
async def test_purge_raises_not_implemented() -> None:
1824
"""Test that purge() raises NotImplementedError."""
19-
client = _make_dataset_client()
25+
client, _ = _make_dataset_client()
2026
with pytest.raises(NotImplementedError, match='Purging datasets is not supported'):
2127
await client.purge()
2228

2329

2430
async def test_drop_calls_api_delete() -> None:
2531
"""Test that drop() delegates to the API client."""
26-
client = _make_dataset_client()
32+
client, api_client = _make_dataset_client()
2733
await client.drop()
28-
client._api_client.delete.assert_awaited_once() # ty: ignore[possibly-missing-attribute]
34+
api_client.delete.assert_awaited_once()
2935

3036

3137
async def test_deprecated_api_public_base_url() -> None:

0 commit comments

Comments (0)