Commit c14fb9a

Pijukatelj and janbuchar authored
feat: Handle request list user input (#326)
Add helper function to handle request list inputs. Closes: #310

Co-authored-by: Jan Buchar <[email protected]>
1 parent 777637a commit c14fb9a

File tree

12 files changed: +387, -10 lines changed


pyproject.toml

Lines changed: 3 additions & 0 deletions
```diff
@@ -141,6 +141,9 @@ indent-style = "space"
 docstring-quotes = "double"
 inline-quotes = "single"
 
+[tool.ruff.lint.flake8-type-checking]
+runtime-evaluated-base-classes = ["pydantic.BaseModel", "crawlee.configuration.Configuration"]
+
 [tool.ruff.lint.flake8-builtins]
 builtins-ignorelist = ["id"]
```
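
The new `[tool.ruff.lint.flake8-type-checking]` setting tells ruff that subclasses of `pydantic.BaseModel` and `crawlee.configuration.Configuration` evaluate their field annotations at runtime, so the TCH rules no longer suggest moving those annotations' imports into an `if TYPE_CHECKING:` block. This is what allows the per-file `# ruff: noqa: TCH001 TCH002 TCH003` suppressions to be deleted below. A minimal sketch of the failure mode the setting guards against (`RunInfo` is a made-up model for illustration):

```python
from __future__ import annotations

# Pydantic resolves field annotations when the class is created, so this
# import must stay at runtime even though it only appears in an annotation.
from datetime import datetime

from pydantic import BaseModel


class RunInfo(BaseModel):
    # If TCH003 moved `datetime` into an `if TYPE_CHECKING:` block, building
    # this model would fail at import time when pydantic tries to resolve
    # the annotation.
    started_at: datetime
```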

src/apify/_actor.py

Lines changed: 2 additions & 1 deletion
```diff
@@ -8,7 +8,6 @@
 
 from lazy_object_proxy import Proxy
 from pydantic import AliasChoices
-from typing_extensions import Self
 
 from apify_client import ApifyClientAsync
 from apify_shared.consts import ActorEnvVars, ActorExitCodes, ApifyEnvVars
@@ -31,6 +30,8 @@
     import logging
     from types import TracebackType
 
+    from typing_extensions import Self
+
     from crawlee.proxy_configuration import _NewUrlFunction
 
     from apify._models import Webhook
```
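
Moving the `Self` import into the `TYPE_CHECKING` block works because `Self` appears only in annotations, which stay unevaluated strings under `from __future__ import annotations`. A simplified illustration of the pattern, not the SDK's actual class body, and assuming `_actor.py` uses the `__future__` import like the other files touched in this commit:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only type checkers ever need this name; it is never imported at runtime.
    from typing_extensions import Self


class Actor:
    async def __aenter__(self) -> Self:
        # The return annotation is a plain string at runtime, so the guarded
        # import above is sufficient.
        return self
```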

src/apify/_configuration.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -1,4 +1,3 @@
-# ruff: noqa: TCH001 TCH002 TCH003 (so that pydantic annotations work)
 from __future__ import annotations
 
 from datetime import datetime, timedelta
```

src/apify/_models.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -1,4 +1,3 @@
-# ruff: noqa: TCH001 TCH002 TCH003 (Pydantic)
 from __future__ import annotations
 
 from datetime import datetime, timedelta
```

src/apify/_platform_event_manager.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import asyncio
-from datetime import datetime  # noqa: TCH003
+from datetime import datetime
 from typing import TYPE_CHECKING, Annotated, Any, Literal, Union
 
 import websockets.client
```

src/apify/scrapy/middlewares/apify_proxy.py

Lines changed: 4 additions & 2 deletions
```diff
@@ -1,11 +1,13 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
 from urllib.parse import ParseResult, urlparse
 
 try:
-    from scrapy import Request, Spider  # noqa: TCH002
+    if TYPE_CHECKING:
+        from scrapy import Request, Spider
+        from scrapy.crawler import Crawler
     from scrapy.core.downloader.handlers.http11 import TunnelError
-    from scrapy.crawler import Crawler  # noqa: TCH002
     from scrapy.exceptions import NotConfigured
 except ImportError as exc:
     raise ImportError(
```
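
The same rearrangement repeats across the Scrapy integration modules below: type-only imports move under `if TYPE_CHECKING:` (which is `False` at runtime, so they are skipped), while imports the module genuinely uses at runtime stay unguarded inside the `try:` so a missing `scrapy` extra still triggers the helpful `ImportError`. A minimal standalone sketch of the pattern, trimmed to one runtime and two type-only imports:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

try:
    # Runtime import: actually executes, so a missing "scrapy" package is
    # caught by the except clause below.
    from scrapy.exceptions import NotConfigured

    # Type-only imports: skipped at runtime, but visible to static type
    # checkers, which treat TYPE_CHECKING as True.
    if TYPE_CHECKING:
        from scrapy import Request, Spider
except ImportError as exc:
    raise ImportError(
        'To use this module, you need to install the "scrapy" extra. Run "pip install apify[scrapy]".',
    ) from exc
```

One subtlety of this design: because the guarded imports never run, a broken `scrapy` installation is only detected through the runtime imports; the type-only block cannot raise.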

src/apify/scrapy/pipelines/actor_dataset_push.py

Lines changed: 4 additions & 1 deletion
```diff
@@ -1,9 +1,12 @@
 from __future__ import annotations
 
+from typing import TYPE_CHECKING
+
 from itemadapter.adapter import ItemAdapter
 
 try:
-    from scrapy import Item, Spider  # noqa: TCH002
+    if TYPE_CHECKING:
+        from scrapy import Item, Spider
 except ImportError as exc:
     raise ImportError(
         'To use this module, you need to install the "scrapy" extra. Run "pip install apify[scrapy]".',
```

src/apify/scrapy/scheduler.py

Lines changed: 4 additions & 1 deletion
```diff
@@ -1,15 +1,18 @@
 from __future__ import annotations
 
 import traceback
+from typing import TYPE_CHECKING
 
 from apify._configuration import Configuration
 from apify.apify_storage_client import ApifyStorageClient
 
 try:
     from scrapy import Spider
     from scrapy.core.scheduler import BaseScheduler
-    from scrapy.http.request import Request  # noqa: TCH002
     from scrapy.utils.reactor import is_asyncio_reactor_installed
+
+    if TYPE_CHECKING:
+        from scrapy.http.request import Request
 except ImportError as exc:
     raise ImportError(
         'To use this module, you need to install the "scrapy" extra. Run "pip install apify[scrapy]".',
```

src/apify/scrapy/utils.py

Lines changed: 4 additions & 1 deletion
```diff
@@ -2,14 +2,17 @@
 
 import asyncio
 from base64 import b64encode
+from typing import TYPE_CHECKING
 from urllib.parse import unquote
 
 from apify_shared.utils import ignore_docs
 
 try:
-    from scrapy.settings import Settings  # noqa: TCH002
     from scrapy.utils.project import get_project_settings
     from scrapy.utils.python import to_bytes
+
+    if TYPE_CHECKING:
+        from scrapy.settings import Settings
 except ImportError as exc:
     raise ImportError(
         'To use this module, you need to install the "scrapy" extra. For example, if you use pip, run '
```

src/apify/storages/__init__.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -1,3 +1,5 @@
 from crawlee.storages import Dataset, KeyValueStore, RequestQueue
 
-__all__ = ['Dataset', 'KeyValueStore', 'RequestQueue']
+from ._request_list import RequestList
+
+__all__ = ['Dataset', 'KeyValueStore', 'RequestQueue', 'RequestList']
```
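
With `RequestList` re-exported from `apify.storages`, an Actor can build its request list from user input. A hypothetical usage sketch: the `RequestList.open()` call, its `request_list_sources_input` parameter, and the `requestListSources` input field name are assumptions not shown in this diff, and the iteration methods are taken from crawlee's request loader interface:

```python
from apify import Actor
from apify.storages import RequestList


async def main() -> None:
    async with Actor:
        actor_input = await Actor.get_input() or {}
        # Assumed API: the new helper turns the Actor's request list input
        # into a ready-to-use RequestList.
        request_list = await RequestList.open(
            request_list_sources_input=actor_input.get('requestListSources'),
        )
        while request := await request_list.fetch_next_request():
            Actor.log.info(f'Processing {request.url}')
            await request_list.mark_request_as_handled(request)
```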
