 
 try:
     from curl_cffi.requests import AsyncSession
-    from curl_cffi.requests.errors import RequestsError
-    from curl_cffi.requests.impersonate import BrowserType
+    from curl_cffi.requests.exceptions import ProxyError as CurlProxyError
+    from curl_cffi.requests.exceptions import RequestException as CurlRequestError
+    from curl_cffi.requests.impersonate import DEFAULT_CHROME as CURL_DEFAULT_CHROME
 except ImportError as exc:
     raise ImportError(
         "To import anything from this subpackage, you need to install the 'curl-impersonate' extra."
 
     from curl_cffi.requests import Response
 
-    from crawlee._types import HttpHeaders, HttpMethod
+    from crawlee._types import HttpHeaders, HttpMethod, HttpQueryParams
     from crawlee.base_storage_client._models import Request
     from crawlee.proxy_configuration import ProxyInfo
     from crawlee.sessions import Session
@@ -116,14 +117,14 @@ async def crawl(
         try:
             response = await client.request(
                 url=request.url,
-                method=request.method.upper(),  # curl-cffi requires uppercase method
+                method=request.method.upper(),  # type: ignore # curl-cffi requires uppercase method
                 headers=request.headers,
                 params=request.query_params,
-                data=request.data,
+                data=request.payload,
                 cookies=session.cookies if session else None,
                 allow_redirects=True,
             )
-        except RequestsError as exc:
+        except CurlRequestError as exc:
             if self._is_proxy_error(exc):
                 raise ProxyError from exc
             raise
@@ -150,7 +151,7 @@ async def send_request(
         *,
         method: HttpMethod = 'GET',
         headers: HttpHeaders | None = None,
-        query_params: dict[str, Any] | None = None,
+        query_params: HttpQueryParams | None = None,
        data: dict[str, Any] | None = None,
        session: Session | None = None,
        proxy_info: ProxyInfo | None = None,
@@ -161,14 +162,14 @@ async def send_request(
         try:
             response = await client.request(
                 url=url,
-                method=method.upper(),  # curl-cffi requires uppercase method
+                method=method.upper(),  # type: ignore # curl-cffi requires uppercase method
                 headers=headers,
                 params=query_params,
                 data=data,
                 cookies=session.cookies if session else None,
                 allow_redirects=True,
             )
-        except RequestsError as exc:
+        except CurlRequestError as exc:
             if self._is_proxy_error(exc):
                 raise ProxyError from exc
             raise
@@ -194,7 +195,7 @@ def _get_client(self, proxy_url: str | None) -> AsyncSession:
         # are set as default options.
         kwargs: dict[str, Any] = {
             'proxy': proxy_url,
-            'impersonate': BrowserType.chrome,
+            'impersonate': CURL_DEFAULT_CHROME,
         }
 
         # Update the default kwargs with any additional user-provided kwargs.
@@ -206,13 +207,12 @@ def _get_client(self, proxy_url: str | None) -> AsyncSession:
         return self._client_by_proxy_url[proxy_url]
 
     @staticmethod
-    def _is_proxy_error(error: RequestsError) -> bool:
+    def _is_proxy_error(error: CurlRequestError) -> bool:
         """Helper to check whether the given error is a proxy-related error."""
         if any(needle in str(error) for needle in ROTATE_PROXY_ERRORS):
             return True
 
-        # Once https://github.com/yifeikong/curl_cffi/issues/361 is resolved, do it better.
-        if 'CONNECT tunnel failed' in str(error):  # noqa: SIM103
+        if isinstance(error, CurlProxyError):  # noqa: SIM103
             return True
 
         return False
0 commit comments