Skip to content

Commit f7881a4

Browse files
committed
Clean up docstrings.
1 parent 528fa1b commit f7881a4

4 files changed

Lines changed: 15 additions & 19 deletions

File tree

aiohttp_aiofiles_tutorial/__init__.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
from time import perf_counter as timer
55

66
import aiofiles
7-
from aiofiles.threadpool.text import AsyncTextIOWrapper
7+
from aiofiles.threadpool.text import AsyncTextIOWrapper as AsyncIOFile
88
from aiohttp import ClientSession
99
from config import EXPORT_FILE, HTML_HEADERS
1010
from logger import LOGGER
@@ -24,11 +24,11 @@ async def init_script():
2424
LOGGER.success(f"Executed {__name__} in {time.perf_counter() - start_time:0.2f} seconds.")
2525

2626

27-
async def create_and_execute_tasks(outfile: AsyncTextIOWrapper):
27+
async def create_and_execute_tasks(outfile: AsyncIOFile):
2828
"""
2929
Open async HTTP session & execute created tasks.
3030
31-
:param AsyncTextIOWrapper outfile: Filepath of local .csv file to write to.
31+
:param AsyncIOFile outfile: Path of local file to write to.
3232
"""
3333
async with ClientSession(headers=HTML_HEADERS) as session:
3434
task_list = await create_tasks(session, urls, outfile)

aiohttp_aiofiles_tutorial/fetcher.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,20 @@
11
"""Fetch URLs, extract their contents, and write parsed data to file."""
2-
from aiofiles.threadpool.text import AsyncTextIOWrapper
2+
from aiofiles.threadpool.text import AsyncTextIOWrapper as AsyncIOFile
33
from aiohttp import ClientError, ClientSession, InvalidURL
44
from logger import LOGGER
55

66
from .writer import write_to_outfile
77

88

99
async def fetch_url_and_save_title(
10-
session: ClientSession, url: str, outfile: AsyncTextIOWrapper, total_count: int, i: int
10+
session: ClientSession, url: str, outfile: AsyncIOFile, total_count: int, i: int
1111
):
1212
"""
1313
Fetch raw HTML from a URL prior to parsing.
1414
1515
:param ClientSession session: Async HTTP requests session.
1616
:param str url: Target URL to be fetched.
17-
:param AsyncTextIOWrapper outfile: Filepath of local .csv file to write to.
17+
:param AsyncIOFile outfile: Path of local file to write to.
1818
:param int total_count: Total number of URLs to be fetched.
1919
:param int i: Current iteration of URL out of total URLs.
2020
"""

aiohttp_aiofiles_tutorial/tasks.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,28 +3,24 @@
33
from asyncio import Task
44
from typing import List
55

6-
from aiofiles.threadpool.text import AsyncTextIOWrapper
6+
from aiofiles.threadpool.text import AsyncTextIOWrapper as AsyncIOFile
77
from aiohttp import ClientSession
88

99
from .fetcher import fetch_url_and_save_title
1010

1111

12-
async def create_tasks(
13-
session: ClientSession, urls: List[str], outfile: AsyncTextIOWrapper
14-
) -> List[Task]:
12+
async def create_tasks(session: ClientSession, urls: List[str], outfile: AsyncIOFile) -> List[Task]:
1513
"""
1614
Create asyncio tasks to execute the `fetch_url_and_save_title` coroutine.
1715
1816
:param ClientSession session: Async HTTP requests session.
1917
:param List[str] urls: Resource URLs to fetch.
20-
:param AsyncTextIOWrapper outfile: Filepath of local .csv file to write to.
18+
:param AsyncIOFile outfile: Path of local file to write to.
2119
2220
:returns: List[Task]
2321
"""
2422
task_list = []
2523
for i, url in enumerate(urls):
26-
task = asyncio.create_task(
27-
fetch_url_and_save_title(session, url, outfile, len(urls), i)
28-
)
24+
task = asyncio.create_task(fetch_url_and_save_title(session, url, outfile, len(urls), i))
2925
task_list.append(task)
3026
return task_list

aiohttp_aiofiles_tutorial/writer.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,16 @@
11
"""Parse data from fetched URL and write to file asynchronously."""
2-
from aiofiles.threadpool.text import AsyncTextIOWrapper
2+
from aiofiles.threadpool.text import AsyncTextIOWrapper as AsyncIOFile
33
from bs4 import BeautifulSoup
44
from logger import LOGGER
55

66

7-
async def write_to_outfile(
8-
html: str, url: str, outfile: AsyncTextIOWrapper, total_count: int, i: int
9-
):
7+
async def write_to_outfile(html: str, url: str, outfile: AsyncIOFile, total_count: int, i: int):
108
"""
119
Write contents of fetched URL to new file in local directory.
1210
1311
:param str html: Source HTML of a single fetched URL.
1412
:param str url: Target URL to be fetched.
15-
:param AsyncTextIOWrapper outfile: Filepath of local .csv file to write to.
13+
:param AsyncIOFile outfile: Path of local file to write to.
1614
:param int total_count: Total number of URLs to be fetched.
1715
:param int i: Current iteration of URL out of total URLs.
1816
"""
@@ -30,6 +28,8 @@ async def get_html_page_title(html: str, url: str) -> str:
3028
3129
:param str html: Raw HTML source of a given fetched URL.
3230
:param str url: URL associated with the extracted HTML.
31+
32+
:returns: str
3333
"""
3434
soup = BeautifulSoup(html, "html.parser")
3535
return f"{soup.title.string.replace(',', '')}, {url},"

0 commit comments

Comments (0)