throttling_example.py — 41 lines (31 loc) · 1.24 KB
import asyncio
from crawlee.crawlers import BasicCrawler, BasicCrawlingContext
from crawlee.request_loaders import ThrottlingRequestManager
from crawlee.storages import RequestQueue
async def main() -> None:
    """Demonstrate per-domain request throttling with ThrottlingRequestManager."""
    # Open the default request queue.
    request_queue = await RequestQueue.open()

    # Wrap the queue so that requests to the listed domains are throttled.
    # The wrapper shares its storage backend with the wrapped queue.
    request_manager = ThrottlingRequestManager(
        request_queue,
        domains=['api.example.com', 'slow-site.org'],
    )

    # Hand the wrapper to the crawler in place of a plain request queue.
    crawler = BasicCrawler(request_manager=request_manager)

    @crawler.router.default_handler
    async def handler(context: BasicCrawlingContext) -> None:
        context.log.info(f'Processing {context.request.url}')

    # Enqueue the start URLs. Requests to the throttled domains are routed
    # straight to their dedicated sub-queues; everything else lands in the
    # main queue.
    await request_manager.add_requests(
        [
            'https://api.example.com/data',
            'https://api.example.com/users',
            'https://slow-site.org/page1',
            'https://fast-site.com/page1',  # not a throttled domain
        ]
    )

    await crawler.run()
if __name__ == '__main__':
    # Script entry point: run the async example on a fresh asyncio event loop.
    asyncio.run(main())