forked from apify/crawlee-python
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_browser_pool.py
More file actions
356 lines (248 loc) · 12.5 KB
/
test_browser_pool.py
File metadata and controls
356 lines (248 loc) · 12.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
from __future__ import annotations
from typing import TYPE_CHECKING
from unittest.mock import AsyncMock
import pytest
from crawlee.browsers import BrowserPool, PlaywrightBrowserPlugin
from crawlee.browsers._browser_controller import BrowserController
from crawlee.browsers._types import CrawleePage
from tests.unit.utils import run_alone_on_mac
if TYPE_CHECKING:
from collections.abc import Mapping
from typing import Any
from yarl import URL
from crawlee.proxy_configuration import ProxyInfo
async def test_default_plugin_new_page_creation(server_url: URL) -> None:
    """Pages opened via the default plugin should use Chromium and render content."""
    async with BrowserPool() as browser_pool:
        page_1 = await browser_pool.new_page()
        await page_1.page.goto(str(server_url))
        assert page_1.browser_type == 'chromium'
        assert page_1.page.url == str(server_url)
        assert '<html' in await page_1.page.content()  # there is some HTML content
        assert browser_pool.total_pages_count == 1

        page_2 = await browser_pool.new_page()
        await page_2.page.goto(str(server_url / 'status/200'))
        assert page_2.browser_type == 'chromium'
        assert page_2.page.url == str(server_url / 'status/200')
        # Fix: verify the newly navigated page_2 here — the original copy-pasted
        # page_1, leaving page_2's content unchecked.
        assert '<html' in await page_2.page.content()  # there is some HTML content
        assert browser_pool.total_pages_count == 2

        await page_1.page.close()
        await page_2.page.close()
async def test_multiple_plugins_new_page_creation(server_url: URL) -> None:
    """New pages should rotate round-robin over the registered plugins."""
    plugin_chromium = PlaywrightBrowserPlugin(browser_type='chromium')
    plugin_firefox = PlaywrightBrowserPlugin(browser_type='firefox')

    async with BrowserPool([plugin_chromium, plugin_firefox]) as browser_pool:
        assert browser_pool.plugins == [plugin_chromium, plugin_firefox]

        opened = []
        # Expected (browser type, target URL) for each consecutive page;
        # the pool alternates between the two registered plugins.
        for expected_type, target in (
            ('chromium', str(server_url)),
            ('firefox', str(server_url / 'headers')),
            ('chromium', str(server_url / 'user-agent')),
        ):
            crawlee_page = await browser_pool.new_page()
            await crawlee_page.page.goto(target)
            assert crawlee_page.browser_type == expected_type
            assert crawlee_page.page.url == target
            assert '<html' in await crawlee_page.page.content()  # there is some HTML content
            opened.append(crawlee_page)

        for crawlee_page in opened:
            await crawlee_page.page.close()

        assert browser_pool.total_pages_count == 3
@pytest.mark.flaky(
    rerun=3,
    reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1660.',
)
async def test_new_page_with_each_plugin(server_url: URL) -> None:
    """`new_page_with_each_plugin` should yield one working page per plugin, in order."""
    plugin_chromium = PlaywrightBrowserPlugin(browser_type='chromium')
    plugin_firefox = PlaywrightBrowserPlugin(browser_type='firefox')

    async with BrowserPool([plugin_chromium, plugin_firefox]) as browser_pool:
        pages = await browser_pool.new_page_with_each_plugin()
        assert len(pages) == 2

        chromium_page, firefox_page = pages
        assert chromium_page.browser_type == 'chromium'
        assert firefox_page.browser_type == 'firefox'

        await chromium_page.page.goto(str(server_url))
        assert chromium_page.page.url == str(server_url)
        assert '<html' in await chromium_page.page.content()  # there is some HTML content

        await firefox_page.page.goto(str(server_url / 'headers'))
        assert firefox_page.page.url == str(server_url / 'headers')
        assert '<html' in await firefox_page.page.content()

        for crawlee_page in pages:
            await crawlee_page.page.close()

        assert browser_pool.total_pages_count == 2
@run_alone_on_mac
async def test_with_default_plugin_constructor(server_url: URL) -> None:
    """`with_default_plugin` should build a pool with a single Playwright plugin using the given kwargs."""
    async with BrowserPool.with_default_plugin(headless=True, browser_type='firefox') as browser_pool:
        assert len(browser_pool.plugins) == 1
        assert isinstance(browser_pool.plugins[0], PlaywrightBrowserPlugin)

        crawlee_page = await browser_pool.new_page()
        assert crawlee_page.browser_type == 'firefox'

        await crawlee_page.page.goto(str(server_url))
        assert crawlee_page.page.url == str(server_url)
        assert '<html' in await crawlee_page.page.content()  # there is some HTML content

        await crawlee_page.page.close()
        assert browser_pool.total_pages_count == 1
async def test_new_page_with_existing_id() -> None:
    """Requesting a page with an already-used ID must raise a ValueError."""
    async with BrowserPool() as browser_pool:
        existing = await browser_pool.new_page()

        with pytest.raises(ValueError, match=r'Page with ID: .* already exists.'):
            await browser_pool.new_page(page_id=existing.id)
async def test_new_page_with_invalid_plugin() -> None:
    """Passing a plugin the pool does not own must raise a ValueError."""
    registered_plugin = PlaywrightBrowserPlugin(browser_type='chromium')
    foreign_plugin = PlaywrightBrowserPlugin(browser_type='firefox')

    async with BrowserPool([registered_plugin]) as browser_pool:
        with pytest.raises(ValueError, match=r'Provided browser_plugin is not one of the plugins used by BrowserPool.'):
            await browser_pool.new_page(browser_plugin=foreign_plugin)
async def test_resource_management(server_url: URL) -> None:
    """Leaving the pool's context manager should close every page it opened."""
    plugin = PlaywrightBrowserPlugin(browser_type='chromium')

    async with BrowserPool([plugin]) as browser_pool:
        crawlee_page = await browser_pool.new_page()
        await crawlee_page.page.goto(str(server_url))
        assert crawlee_page.page.url == str(server_url)
        assert '<html' in await crawlee_page.page.content()  # there is some HTML content
        assert browser_pool.total_pages_count == 1

    # All pages should be closed in __aexit__
    assert crawlee_page.page.is_closed()
async def test_methods_raise_error_when_not_active() -> None:
    """Pool methods must refuse to run outside of an active context, and re-entry is rejected."""
    plugin = PlaywrightBrowserPlugin()
    browser_pool = BrowserPool([plugin])

    assert browser_pool.active is False

    with pytest.raises(RuntimeError, match=r'BrowserPool is not active.'):
        await browser_pool.new_page()

    with pytest.raises(RuntimeError, match=r'BrowserPool is not active.'):
        await browser_pool.new_page_with_each_plugin()

    # Entering the same pool twice must fail on the second (inner) entry.
    with pytest.raises(RuntimeError, match=r'BrowserPool is already active.'):
        async with browser_pool, browser_pool:
            pass

    async with browser_pool:
        assert browser_pool.active is True
async def test_with_plugin_contains_page_options(server_url: URL) -> None:
    """`browser_new_context_options` given to the plugin should apply to pages it creates."""
    plugin = PlaywrightBrowserPlugin(browser_new_context_options={'user_agent': 'My Best User-Agent'})

    async with BrowserPool(plugins=[plugin]) as browser_pool:
        crawlee_page = await browser_pool.new_page()
        await crawlee_page.page.goto(str(server_url / 'user-agent'))
        assert 'My Best User-Agent' in await crawlee_page.page.content()
        await crawlee_page.page.close()
@pytest.mark.parametrize(
    ('retire_after_page_count', 'expect_equal_browsers'),
    [
        pytest.param(2, True, id='Two pages opened in the same browser'),
        pytest.param(1, False, id='Each page opened in a new browser.'),
    ],
)
async def test_browser_pool_retire_browser_after_page_count(
    retire_after_page_count: int, *, expect_equal_browsers: bool
) -> None:
    """Browsers should be retired once they have served the configured number of pages."""
    async with BrowserPool(retire_browser_after_page_count=retire_after_page_count) as browser_pool:
        contexts = []
        # Open two pages sequentially and record which context served each one.
        for _ in range(2):
            crawlee_page = await browser_pool.new_page()
            contexts.append(crawlee_page.page.context)
            await crawlee_page.page.close()

        first_context, second_context = contexts
        if expect_equal_browsers:
            assert first_context is second_context
        else:
            assert first_context is not second_context
async def test_pre_page_create_hook_is_called() -> None:
    """A pre-create hook should run once and be able to mutate the new-context options."""
    hook_spy = AsyncMock()

    async with BrowserPool() as browser_pool:

        @browser_pool.pre_page_create_hook
        async def hook(
            page_id: str,
            controller: BrowserController,
            browser_new_context_options: dict[str, Any],
            proxy_info: ProxyInfo | None,
        ) -> None:
            await hook_spy(page_id, controller, browser_new_context_options, proxy_info)
            # Mutations made here must influence the page about to be created.
            browser_new_context_options['user_agent'] = 'Modified User-Agent'
            # The page does not exist yet when the pre-create hook fires.
            assert len(controller.pages) == 0

        test_page = await browser_pool.new_page()
        user_agent = await test_page.page.evaluate('navigator.userAgent')
        await test_page.page.close()

        assert user_agent == 'Modified User-Agent'
        hook_spy.assert_awaited_once()

        page_id, controller, _, proxy_info = hook_spy.call_args[0]
        assert isinstance(page_id, str)
        assert test_page.id == page_id
        assert isinstance(controller, BrowserController)
        assert proxy_info is None
async def test_post_page_create_hook_is_called() -> None:
    """A post-create hook should run once with the freshly created page."""
    hook_spy = AsyncMock()

    async with BrowserPool() as browser_pool:

        @browser_pool.post_page_create_hook
        async def hook(crawlee_page: CrawleePage, controller: BrowserController) -> None:
            await hook_spy(crawlee_page, controller)
            # The page already exists when the post-create hook fires,
            # so we can run JS on it and observe the effect afterwards.
            await crawlee_page.page.evaluate('window.__hook_applied = true')
            assert isinstance(crawlee_page, CrawleePage)
            assert len(controller.pages) == 1

        test_page = await browser_pool.new_page()
        js_result = await test_page.page.evaluate('window.__hook_applied')
        await test_page.page.close()

        assert js_result is True
        hook_spy.assert_awaited_once()

        crawlee_page, controller = hook_spy.call_args[0]
        assert test_page is crawlee_page
        assert isinstance(controller, BrowserController)
async def test_pre_page_close_hook() -> None:
    """A pre-close hook should fire while the page is still open."""
    hook_spy = AsyncMock()

    async with BrowserPool() as browser_pool:

        @browser_pool.pre_page_close_hook
        async def hook(crawlee_page: CrawleePage, controller: BrowserController) -> None:
            await hook_spy(crawlee_page, controller)
            # The page must not be closed yet when the pre-close hook runs.
            assert not crawlee_page.page.is_closed()
            assert len(controller.pages) == 1

        test_page = await browser_pool.new_page()
        await test_page.page.close()

        hook_spy.assert_awaited_once()
        assert test_page.page.is_closed()
async def test_post_page_close_hook() -> None:
    """A post-close hook should fire after the page has been removed from the controller."""
    call_mock = AsyncMock()

    async with BrowserPool() as browser_pool:

        @browser_pool.post_page_close_hook
        async def hook(page_id: str, controller: BrowserController) -> None:
            await call_mock(page_id, controller)
            # The page is already gone from the controller when this hook fires.
            assert len(controller.pages) == 0

        test_page = await browser_pool.new_page()
        await test_page.page.close()

        # Fix: assert the hook ran BEFORE unpacking call_args. If the hook never
        # fired, call_args is None and the unpack would raise an opaque TypeError
        # instead of a clear assertion failure (the sibling hook tests already
        # assert first).
        call_mock.assert_awaited_once()

        page_id, controller = call_mock.call_args[0]
        assert test_page.id == page_id
        assert isinstance(controller, BrowserController)
async def test_page_hooks_execution_order() -> None:
    """Lifecycle hooks should fire in create/close order: pre-create, post-create, pre-close, post-close."""
    events: list[str] = []

    async with BrowserPool() as browser_pool:

        @browser_pool.pre_page_create_hook
        async def pre_create(
            _page_id: str,
            _controller: BrowserController,
            _browser_new_context_options: Mapping[str, Any],
            _proxy_info: ProxyInfo | None,
        ) -> None:
            events.append('pre_create')

        @browser_pool.post_page_create_hook
        async def post_create(_crawlee_page: CrawleePage, _controller: BrowserController) -> None:
            events.append('post_create')

        @browser_pool.pre_page_close_hook
        async def pre_close(_crawlee_page: CrawleePage, _controller: BrowserController) -> None:
            events.append('pre_close')

        @browser_pool.post_page_close_hook
        async def post_close(_page_id: str, _controller: BrowserController) -> None:
            events.append('post_close')

        page = await browser_pool.new_page()
        await page.page.close()

        assert events == ['pre_create', 'post_create', 'pre_close', 'post_close']
async def test_multiple_hooks_all_called() -> None:
    """Every hook registered for the same event should run, in registration order."""
    events: list[str] = []

    async with BrowserPool() as browser_pool:

        @browser_pool.post_page_create_hook
        async def first(_crawlee_page: CrawleePage, _controller: BrowserController) -> None:
            events.append('first')

        @browser_pool.post_page_create_hook
        async def second(_crawlee_page: CrawleePage, _controller: BrowserController) -> None:
            events.append('second')

        page = await browser_pool.new_page()
        await page.page.close()

        assert events == ['first', 'second']