@@ -1168,7 +1168,7 @@ async def test_crawler_multiple_stops_in_parallel() -> None:
1168 1168    # Set concurrency to 2 to ensure two urls are being visited in parallel.
1169 1169    crawler = BasicCrawler(concurrency_settings=ConcurrencySettings(desired_concurrency=2, max_concurrency=2))
11701170
1171     -    both_handlers_started = asyncio.Barrier(2)  # type: ignore[attr-defined]  # Test is skipped in older Python versions.
     1171 +    both_handlers_started = asyncio.Barrier(2)  # ty: ignore[unresolved-attribute]  # Test is skipped in older Python versions.
1172 1172    only_one_handler_at_a_time = asyncio.Semaphore(1)
1173 1173
1174 1174    @crawler.router.default_handler
@@ -1352,7 +1352,7 @@ async def test_context_use_state_race_condition_in_handlers(key_value_store: Key
13521352 Result should be incremented by 2.
13531353 Method `use_state` must be implemented in a way that prevents race conditions in such scenario."""
13541354 # Test is skipped in older Python versions.
1355     -    from asyncio import Barrier  # type: ignore[attr-defined]  # noqa: PLC0415
     1355 +    from asyncio import Barrier  # ty: ignore[unresolved-import]  # noqa: PLC0415
1356 1356
1357 1357    crawler = BasicCrawler()
1358 1358    store = await crawler.get_key_value_store()
@@ -1393,7 +1393,7 @@ async def test_timeout_in_handler(sleep_type: str) -> None:
13931393 Crawler should attempt to retry it.
13941394 This test creates situation where the request handler times out twice, on third retry it does not time out."""
13951395 # Test is skipped in older Python versions.
1396     -    from asyncio import timeout  # type: ignore[attr-defined]  # noqa: PLC0415
     1396 +    from asyncio import timeout  # ty: ignore[unresolved-import]  # noqa: PLC0415
1397 1397
1398 1398    non_realtime_system_coefficient = 10
1399 1399    handler_timeout = timedelta(seconds=1)
0 commit comments