What is the best way to make 100k API calls using asyncio async/await on Python 3.7+? The idea is to keep 100 tasks running in parallel at all times.
What should be avoided:
1. Kicking off all 100k tasks at once.
2. Waiting for all 100 parallel tasks to complete before scheduling a new batch of 100 (a sketch of this second pattern follows the example below).
The example below illustrates the first approach, which is not what is wanted.
import aiohttp
import asyncio


async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()


async def main():
    urls = [
        'http://python.org',
        'https://google.com',
        'http://yifei.me'
    ]
    tasks = []
    async with aiohttp.ClientSession() as session:
        for url in urls:
            tasks.append(fetch(session, url))
        # Every coroutine runs at once; with 100k URLs this launches
        # 100k concurrent requests, which is exactly the problem.
        htmls = await asyncio.gather(*tasks)
        for html in htmls:
            print(html[:100])


if __name__ == '__main__':
    asyncio.run(main())  # idiomatic on Python 3.7+
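(Not part of the original question, added for contrast.) The second pattern to avoid would look roughly like this: chunk the URLs into groups of 100 and await each group in full before starting the next. Each batch then runs at the speed of its slowest request, so the effective concurrency regularly drops below 100:

import aiohttp
import asyncio


async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()


async def main():
    urls = ['http://python.org'] * 100_000  # placeholder list (assumption)
    async with aiohttp.ClientSession() as session:
        for i in range(0, len(urls), 100):
            batch = urls[i:i + 100]
            # One straggler stalls the other 99 slots until the
            # whole batch has finished.
            htmls = await asyncio.gather(*(fetch(session, u) for u in batch))


asyncio.run(main())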
Answer 0 (score: 2)
Use a semaphore. Semaphores limit the number of concurrent operations, and asyncio ships with its own asynchronous version.
import aiohttp
import asyncio


async def fetch(session, url, sema):
    # Acquire the semaphore before issuing the request: at most 100
    # coroutines can hold it at once, so at most 100 requests are in flight.
    async with sema, session.get(url) as response:
        return await response.text()


async def main():
    urls = [
        'http://python.org',
        'https://google.com',
        'http://yifei.me',
        'other urls...'
    ]
    tasks = []
    sema = asyncio.BoundedSemaphore(value=100)
    async with aiohttp.ClientSession() as session:
        for url in urls:
            tasks.append(fetch(session, url, sema))
        htmls = await asyncio.gather(*tasks)
        for html in htmls:
            print(html[:100])


if __name__ == '__main__':
    asyncio.run(main())  # idiomatic on Python 3.7+
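Because each coroutine releases the semaphore as soon as its own request completes, the next waiting coroutine starts immediately: roughly 100 requests stay in flight continuously, avoiding both of the patterns the question rules out.

One caveat (my addition, not part of the original answer): asyncio.gather still builds all 100k coroutine objects up front. Network concurrency is capped at 100, but the objects themselves cost memory. If that matters, a fixed pool of worker tasks consuming an asyncio.Queue is a common alternative; a minimal sketch under the same assumptions (placeholder URL list, error handling omitted):

import aiohttp
import asyncio


async def worker(queue, session, results):
    # Each worker pulls the next URL as soon as it finishes the previous one.
    while True:
        url = await queue.get()
        try:
            async with session.get(url) as response:
                results.append(await response.text())
        finally:
            queue.task_done()


async def main():
    urls = ['http://python.org'] * 100_000  # placeholder list (assumption)
    queue = asyncio.Queue()
    for url in urls:
        queue.put_nowait(url)
    results = []
    async with aiohttp.ClientSession() as session:
        workers = [asyncio.create_task(worker(queue, session, results))
                   for _ in range(100)]
        await queue.join()   # resolves once task_done() was called for every URL
        for w in workers:
            w.cancel()       # workers loop forever; stop them once the queue drains


asyncio.run(main())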