"""Crawl a local index page and run the `hey` load generator against each link.

For every <a href> found at the index URL, the script benchmarks both the
'/dav/' variant of the link and the link itself.
"""

import asyncio
import subprocess  # retained: original module-level import; async subprocess is used below
from urllib.parse import urljoin

import aiohttp
from bs4 import BeautifulSoup


async def extract_links(url):
    """Yield absolute URLs for every <a> tag with an href attribute at *url*.

    Relative hrefs are resolved against *url* with urljoin.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            soup = BeautifulSoup(await response.text(), 'html.parser')
    for link in soup.find_all('a'):
        href = link.get('href')
        # Anchors without an href return None; urljoin(url, None) raises,
        # so skip them instead of crashing the crawl.
        if href is not None:
            yield urljoin(url, href)


async def benchmark(url):
    """Run `hey -n 100000 -c 100 <url>` without blocking the event loop.

    The original used subprocess.run, which stalls every other coroutine
    for the duration of the benchmark; asyncio's subprocess API lets the
    loop keep running while the child process works.
    """
    proc = await asyncio.create_subprocess_exec(
        'hey', '-n', '100000', '-c', '100', url
    )
    await proc.wait()


url = 'http://localhost:9999/http/'


async def main():
    """Benchmark the '/dav/' twin and then the original of each extracted link."""
    async for link in extract_links(url):
        for target in (link.replace('/http/', '/dav/'), link):
            print("BENCHMARKING " + target)
            await benchmark(target)


if __name__ == '__main__':
    # Guarded so importing this module does not kick off a benchmark run.
    # Python 3.7+
    asyncio.run(main())