Skip to content

Commit 9becf12

Browse files
vdusek and claude authored
test: fix flaky Windows CI timeouts in browser tests (#1806)
## Summary

- Replace `asyncio.sleep` with `asyncio.Event().wait()` in the adaptive playwright timeout test and increase the timeout from 1s to 10s.
- Increase Firefox `operation_timeout` from 15s to 60s in the browser pool test.

---------

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent e26eb51 commit 9becf12

File tree

2 files changed

+13
-7
lines changed

2 files changed

+13
-7
lines changed

tests/unit/browsers/test_browser_pool.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22

3+
from datetime import timedelta
34
from typing import TYPE_CHECKING
45
from unittest.mock import AsyncMock
56

@@ -103,7 +104,10 @@ async def test_new_page_with_each_plugin(server_url: URL) -> None:
103104

104105
@run_alone_on_mac
105106
async def test_with_default_plugin_constructor(server_url: URL) -> None:
106-
async with BrowserPool.with_default_plugin(headless=True, browser_type='firefox') as browser_pool:
107+
# Use a generous operation timeout so that Firefox has enough time to launch on slow Windows CI.
108+
async with BrowserPool.with_default_plugin(
109+
headless=True, browser_type='firefox', operation_timeout=timedelta(seconds=60)
110+
) as browser_pool:
107111
assert len(browser_pool.plugins) == 1
108112
assert isinstance(browser_pool.plugins[0], PlaywrightBrowserPlugin)
109113

tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -576,15 +576,17 @@ async def test_adaptive_playwright_crawler_timeout_in_sub_crawler(test_urls: lis
576576
crawler.
577577
"""
578578
static_only_predictor_no_detection = _SimpleRenderingTypePredictor(detection_probability_recommendation=cycle([0]))
579-
request_handler_timeout = timedelta(seconds=1)
579+
# Use a generous timeout so the static pipeline has enough time to reach the handler even on slow CI.
580+
# The handler will block indefinitely, so the timeout will always fire during the handler's wait.
581+
request_handler_timeout = timedelta(seconds=10)
580582

581583
crawler = AdaptivePlaywrightCrawler.with_beautifulsoup_static_parser(
582584
max_request_retries=0,
583585
rendering_type_predictor=static_only_predictor_no_detection,
584586
request_handler_timeout=request_handler_timeout,
585587
)
586-
mocked_static_handler = Mock()
587-
mocked_browser_handler = Mock()
588+
mocked_static_handler = Mock(name='static_handler')
589+
mocked_browser_handler = Mock(name='browser_handler')
588590

589591
@crawler.router.default_handler
590592
async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
@@ -596,13 +598,13 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
596598
mocked_static_handler()
597599
# Relax timeout for the fallback browser request to allow for slow browser startup on CI
598600
crawler._request_handler_timeout = timedelta(seconds=120)
599-
# Sleep for time obviously larger than top crawler timeout.
600-
await asyncio.sleep(request_handler_timeout.total_seconds() * 3)
601+
# Block indefinitely - will be cancelled when the request_handler_timeout fires.
602+
await asyncio.Event().wait()
601603

602604
await crawler.run(test_urls[:1])
603605

604606
mocked_static_handler.assert_called_once_with()
605-
# Browser handler was capable of running despite static handler having sleep time larger than top handler timeout.
607+
# Browser handler was capable of running despite static handler blocking longer than the handler timeout.
606608
mocked_browser_handler.assert_called_once_with()
607609

608610

0 commit comments

Comments (0)