Skip to content

Commit e078346

Browse files
committed
Simplify logic.
1 parent 0d254fe commit e078346

3 files changed

Lines changed: 6 additions & 1273 deletions

File tree

Lines changed: 5 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,4 @@
11
"""Make hundreds of requests concurrently and save responses to disk."""
2-
from asyncio import Task
3-
from typing import Tuple, List, Optional
4-
52
import asyncio
63
import time
74
from time import perf_counter as timer
@@ -21,24 +18,19 @@ async def init_script():
2118
"""Initiate script by preparing an output file prior to executing tasks."""
2219
start_time = timer()
2320
async with aiofiles.open(FETCHED_URL_TITLES, mode="w+") as output_file:
24-
await output_file.write("title,url\n")
25-
done, pending = await create_and_execute_tasks(output_file)
26-
if len(pending) == 0:
27-
await output_file.close()
28-
21+
await output_file.write("title,url,\n")
22+
await create_and_execute_tasks(output_file)
23+
await output_file.close()
2924
LOGGER.success(f"Executed {__name__} in {time.perf_counter() - start_time:0.2f} seconds.")
3025

3126

32-
async def create_and_execute_tasks(output_file: AsyncTextIOWrapper) -> Tuple[List[Optional[Task]], List[Optional[Task]]]:
27+
async def create_and_execute_tasks(output_file: AsyncTextIOWrapper):
3328
"""
3429
Open async HTTP session & execute created tasks.
3530
3631
:param AsyncTextIOWrapper output_file: Filepath to local .json file to write output to.
37-
38-
:returns: Tuple[List[Optional[Task]], List[Optional[Task]]]
3932
"""
4033
async with ClientSession(headers=HTML_HEADERS) as session:
4134
task_list = await create_tasks(session, urls, output_file)
4235
inspect_event_loop()
43-
done, pending = await asyncio.wait(task_list)
44-
return done, pending
36+
await asyncio.gather(*task_list)

aiohttp_aiofiles_tutorial/writer.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,5 +30,4 @@ async def get_html_page_title(html_body: str, url: str) -> str:
3030
:param str url: URL associated with the extracted HTML.
3131
"""
3232
soup = BeautifulSoup(html_body, "html.parser")
33-
return f"{soup.title.string.replace(',', '')}, {url}"
34-
33+
return f"{soup.title.string.replace(',', '')}, {url},"

0 commit comments

Comments (0)