@@ -10,7 +10,7 @@
 
 from crawlee import Request
 from crawlee._types import HttpMethod
-from crawlee.http_clients import BaseHttpClient, HttpxHttpClient
+from crawlee.http_clients import HttpClient, HttpxHttpClient
 from crawlee.request_loaders import RequestList as CrawleeRequestList
 
 from apify._utils import docs_group
@@ -49,7 +49,7 @@ class RequestList(CrawleeRequestList):
     async def open(
         name: str | None = None,
         request_list_sources_input: list[dict[str, Any]] | None = None,
-        http_client: BaseHttpClient | None = None,
+        http_client: HttpClient | None = None,
     ) -> RequestList:
         """Creates RequestList from Actor input requestListSources.
 
@@ -78,7 +78,7 @@ async def open(
 
     @staticmethod
     async def _create_request_list(
-        name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: BaseHttpClient | None
+        name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: HttpClient | None
     ) -> RequestList:
         if not http_client:
             http_client = HttpxHttpClient()
@@ -108,7 +108,7 @@ def _create_requests_from_input(simple_url_inputs: list[_SimpleUrlInput]) -> lis
 
     @staticmethod
     async def _fetch_requests_from_url(
-        remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: BaseHttpClient
+        remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: HttpClient
     ) -> list[Request]:
         """Crete list of requests from url.
 
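For reference, a minimal usage sketch of the updated open() signature with the renamed HttpClient type. The apify.storages import path and the shape of request_list_sources_input are assumptions for illustration, not taken from this diff; the fallback to HttpxHttpClient() when no client is passed is shown in _create_request_list above.

# Usage sketch (assumed import path and input shape).
from crawlee.http_clients import HttpxHttpClient

from apify.storages import RequestList  # assumed import path


async def main() -> None:
    # http_client is optional; per _create_request_list in the diff,
    # omitting it falls back to HttpxHttpClient().
    request_list = await RequestList.open(
        name='my-request-list',
        request_list_sources_input=[{'url': 'https://example.com'}],  # assumed input shape
        http_client=HttpxHttpClient(),
    )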