
Commit b9a0cfd

docs: Set line length for docs-related code to 90 (#398)

Description: Set the line length for docs-related code to 90 so that each code example is fully visible without having to use a horizontal slider. Update the existing examples to comply.
1 parent 8c02807 · commit b9a0cfd

File tree: 10 files changed (+58, −18 lines)

docs/01_overview/code/01_introduction.py

Lines changed: 4 additions & 1 deletion

@@ -10,5 +10,8 @@ async def main() -> None:
         async with httpx.AsyncClient() as client:
             response = await client.get(actor_input['url'])
             soup = BeautifulSoup(response.content, 'html.parser')
-            data = {'url': actor_input['url'], 'title': soup.title.string if soup.title else None}
+            data = {
+                'url': actor_input['url'],
+                'title': soup.title.string if soup.title else None,
+            }
             await Actor.push_data(data)

docs/02_guides/code/02_crawlee_beautifulsoup.py

Lines changed: 2 additions & 1 deletion

@@ -25,7 +25,8 @@ async def main() -> None:

         # Create a crawler.
         crawler = BeautifulSoupCrawler(
-            # Limit the crawl to max requests. Remove or increase it for crawling all links.
+            # Limit the crawl to max requests.
+            # Remove or increase it for crawling all links.
             max_requests_per_crawl=50,
         )

docs/02_guides/code/02_crawlee_playwright.py

Lines changed: 14 additions & 4 deletions

@@ -25,7 +25,8 @@ async def main() -> None:

         # Create a crawler.
         crawler = PlaywrightCrawler(
-            # Limit the crawl to max requests. Remove or increase it for crawling all links.
+            # Limit the crawl to max requests.
+            # Remove or increase it for crawling all links.
             max_requests_per_crawl=50,
             headless=True,
             browser_launch_options={
@@ -43,9 +44,18 @@ async def request_handler(context: PlaywrightCrawlingContext) -> None:
             data = {
                 'url': context.request.url,
                 'title': await context.page.title(),
-                'h1s': [await h1.text_content() for h1 in await context.page.locator('h1').all()],
-                'h2s': [await h2.text_content() for h2 in await context.page.locator('h2').all()],
-                'h3s': [await h3.text_content() for h3 in await context.page.locator('h3').all()],
+                'h1s': [
+                    await h1.text_content()
+                    for h1 in await context.page.locator('h1').all()
+                ],
+                'h2s': [
+                    await h2.text_content()
+                    for h2 in await context.page.locator('h2').all()
+                ],
+                'h3s': [
+                    await h3.text_content()
+                    for h3 in await context.page.locator('h3').all()
+                ],
             }

             # Store the extracted data to the default dataset.

docs/02_guides/code/scrapy_project/src/__main__.py

Lines changed: 2 additions & 1 deletion

@@ -2,7 +2,8 @@

 from twisted.internet import asyncioreactor

-# Install Twisted's asyncio reactor before importing any other Twisted or Scrapy components.
+# Install Twisted's asyncio reactor before importing any other Twisted or
+# Scrapy components.
 asyncioreactor.install()  # type: ignore[no-untyped-call]

 import os
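
The ordering matters because the first import of `twisted.internet.reactor` installs Twisted's default reactor, after which `asyncioreactor.install()` raises `ReactorAlreadyInstalledError`. A minimal sketch of the constraint; the `CrawlerProcess` import here is only a stand-in for whatever Scrapy components the real module pulls in:

from twisted.internet import asyncioreactor

# Must run before any Scrapy import, since importing Scrapy can trigger an
# import of `twisted.internet.reactor`, which installs the default reactor.
asyncioreactor.install()  # type: ignore[no-untyped-call]

from scrapy.crawler import CrawlerProcess  # noqa: E402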

docs/02_guides/code/scrapy_project/src/spiders/title.py

Lines changed: 2 additions & 1 deletion

@@ -60,7 +60,8 @@ def parse(self, response: Response) -> Generator[TitleItem | Request, None, None]:
         title = response.css('title::text').extract_first()
         yield TitleItem(url=url, title=title)

-        # Extract all links from the page, create `Request` objects out of them, and yield them.
+        # Extract all links from the page, create `Request` objects out of them,
+        # and yield them.
         for link_href in response.css('a::attr("href")'):
             link_url = urljoin(response.url, link_href.get())
             if link_url.startswith(('http://', 'https://')):

docs/03_concepts/code/03_rq.py

Lines changed: 7 additions & 4 deletions

@@ -19,7 +19,9 @@ async def main() -> None:
         await queue.add_request(Request.from_url('http://example.com/0'), forefront=True)

         # If you try to add an existing request again, it will not do anything
-        add_request_info = await queue.add_request(Request.from_url('http://different-example.com/5'))
+        add_request_info = await queue.add_request(
+            Request.from_url('http://different-example.com/5')
+        )
         Actor.log.info(f'Add request info: {add_request_info}')

         processed_request = await queue.get_request(add_request_info.id)
@@ -29,8 +31,8 @@ async def main() -> None:
         while not await queue.is_finished():
             # Fetch the next unhandled request in the queue
             request = await queue.fetch_next_request()
-            # This can happen due to the eventual consistency of the underlying request queue storage,
-            # best solution is just to sleep a bit
+            # This can happen due to the eventual consistency of the underlying request
+            # queue storage, best solution is just to sleep a bit.
             if request is None:
                 await asyncio.sleep(1)
                 continue
@@ -45,6 +47,7 @@ async def main() -> None:
                 Actor.log.info('Request successful.')
                 await queue.mark_request_as_handled(request)
             else:
-                # If processing the request was unsuccessful, reclaim it so it can be processed again
+                # If processing the request was unsuccessful, reclaim it so it can be
+                # processed again.
                 Actor.log.warning('Request failed, will retry!')
                 await queue.reclaim_request(request)
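
For orientation, the three hunks above all belong to one request-queue walkthrough. Assembled into a single sketch, with the setup and the success check that the diff elides filled in as assumptions (`open_request_queue` for the setup, a hypothetical `process()` for the per-request work), the loop reads roughly:

import asyncio

from apify import Actor, Request


async def main() -> None:
    async with Actor:
        queue = await Actor.open_request_queue()

        # Adding an already-present request is a no-op; the returned info
        # describes the existing request.
        add_request_info = await queue.add_request(
            Request.from_url('http://different-example.com/5')
        )
        Actor.log.info(f'Add request info: {add_request_info}')

        while not await queue.is_finished():
            request = await queue.fetch_next_request()
            # `fetch_next_request` can return None due to the eventual
            # consistency of the underlying storage; sleep a bit and retry.
            if request is None:
                await asyncio.sleep(1)
                continue

            if await process(request):  # hypothetical per-request handler
                await queue.mark_request_as_handled(request)
            else:
                # Reclaim the request so it can be processed again.
                await queue.reclaim_request(request)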

docs/03_concepts/code/05_proxy_actor_input.py

Lines changed: 3 additions & 1 deletion

@@ -5,7 +5,9 @@ async def main() -> None:
     async with Actor:
         actor_input = await Actor.get_input() or {}
         proxy_settings = actor_input.get('proxySettings')
-        proxy_configuration = await Actor.create_proxy_configuration(actor_proxy_input=proxy_settings)
+        proxy_configuration = await Actor.create_proxy_configuration(
+            actor_proxy_input=proxy_settings
+        )

         if not proxy_configuration:
             raise RuntimeError('No proxy configuration available.')
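
Downstream use is unchanged by this reformat. As a hedged sketch only, assuming a recent httpx whose `AsyncClient` accepts a `proxy=` argument, the configuration would typically be consumed like:

proxy_url = await proxy_configuration.new_url()
async with httpx.AsyncClient(proxy=proxy_url) as client:
    # Route the request through the proxy chosen by the configuration.
    response = await client.get('http://example.com')
    Actor.log.info(f'Fetched {response.url} via {proxy_url}')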

docs/03_concepts/code/05_proxy_rotation.py

Lines changed: 12 additions & 4 deletions

@@ -17,7 +17,15 @@ async def main() -> None:
         proxy_url = await proxy_configuration.new_url()  # http://proxy-2.com
         proxy_url = await proxy_configuration.new_url()  # http://proxy-1.com
         proxy_url = await proxy_configuration.new_url()  # http://proxy-2.com
-        proxy_url = await proxy_configuration.new_url(session_id='a')  # http://proxy-1.com
-        proxy_url = await proxy_configuration.new_url(session_id='b')  # http://proxy-2.com
-        proxy_url = await proxy_configuration.new_url(session_id='b')  # http://proxy-2.com
-        proxy_url = await proxy_configuration.new_url(session_id='a')  # http://proxy-1.com
+        proxy_url = await proxy_configuration.new_url(
+            session_id='a'
+        )  # http://proxy-1.com
+        proxy_url = await proxy_configuration.new_url(
+            session_id='b'
+        )  # http://proxy-2.com
+        proxy_url = await proxy_configuration.new_url(
+            session_id='b'
+        )  # http://proxy-2.com
+        proxy_url = await proxy_configuration.new_url(
+            session_id='a'
+        )  # http://proxy-1.com
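
The URLs in the trailing comments follow from plain round-robin rotation over a two-proxy list, while a `session_id` pins that session to a single proxy across calls. A sketch of the setup those comments assume (the proxy URLs are the example's placeholders, not real servers):

proxy_configuration = await Actor.create_proxy_configuration(
    proxy_urls=[
        'http://proxy-1.com',
        'http://proxy-2.com',
    ]
)

Calls without a `session_id` advance the round-robin pointer; repeated calls with the same `session_id` keep returning the proxy first assigned to that session.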

docs/03_concepts/code/09_webserver.py

Lines changed: 3 additions & 1 deletion

@@ -21,7 +21,9 @@ def run_server() -> None:
     # Start the HTTP server on the provided port,
     # and save a reference to the server.
     global http_server
-    with ThreadingHTTPServer(('', Actor.config.web_server_port), RequestHandler) as server:
+    with ThreadingHTTPServer(
+        ('', Actor.config.web_server_port), RequestHandler
+    ) as server:
         Actor.log.info(f'Server running on {Actor.config.web_server_port}')
         http_server = server
         server.serve_forever()
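
`RequestHandler` is defined earlier in the file; as a stand-in only (an assumption, not the file's actual handler), a minimal handler compatible with `ThreadingHTTPServer` looks like:

from http.server import BaseHTTPRequestHandler


class RequestHandler(BaseHTTPRequestHandler):
    def do_GET(self) -> None:
        # Respond to every GET with a short plain-text body.
        self.send_response(200)
        self.send_header('Content-Type', 'text/plain; charset=utf-8')
        self.end_headers()
        self.wfile.write(b'Hello from the Actor web server!')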

docs/pyproject.toml

Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+# Line length here differs from the rest of the code so that the example code shown
+# on the generated documentation webpages fits without a horizontal slider.
+
+[tool.ruff]
+# Inherit everything from the project's top-level configuration file.
+extend = "../pyproject.toml"
+
+# Override just the line length.
+line-length = 90  # Maximum that fits on the docs webpage. Longer lines need a slider.
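
Ruff resolves configuration from the closest pyproject.toml above each checked file, so code under docs/ picks up this 90-character limit while the rest of the repository keeps the top-level setting; `extend` inherits every other rule from the parent file and overrides only `line-length`.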
