# Python crawler: asyncio + aiohttp + aiofiles — multi-task async coroutine image downloader
# main.py
"""=== coding: UTF8 ==="""
# requests.get()同步代码 -> 异步操作aiohttp
import asyncio
import aiohttp
import aiofiles

# Image URLs to be downloaded concurrently.
urls = [
    "https://img.lianzhixiu.com/uploads/allimg/202109/9999/d1eeaa0450.jpg",
    "https://img.lianzhixiu.com/uploads/allimg/202109/9999/6747451f08.jpg",
    "https://img.lianzhixiu.com/uploads/allimg/202108/9999/88abd53cc1.jpg",
]


async def aioDownload(url):
    """Download one image from *url* and save it under its basename.

    Sends the request with aiohttp and writes the body with aiofiles so
    neither the network read nor the disk write blocks the event loop.
    """
    print("开始下载")
    # Split once from the right; [1] is everything after the last "/",
    # which becomes the local file name.
    name = url.rsplit("/", 1)[1]
    async with aiohttp.ClientSession() as session:  # async analogue of `requests`
        async with session.get(url) as resp:  # async analogue of requests.get()
            # Response is in; write it out asynchronously with aiofiles.
            async with aiofiles.open(name, mode="wb") as f:
                # Reading the body is itself a coroutine, so it must be
                # awaited (resp.text() would be the textual counterpart).
                await f.write(await resp.content.read())
    print("下载完成")


async def main():
    """Create one download task per URL and wait for all of them."""
    tasks = [asyncio.create_task(aioDownload(url)) for url in urls]
    # gather() propagates task exceptions and is the supported replacement
    # for the old asyncio.wait(tasks) pattern (wait() returns (done, pending)
    # and silently keeps exceptions inside the Task objects).
    await asyncio.gather(*tasks)


"""
========================================
Main-function smoke test
========================================
"""
if __name__ == '__main__':
    # Kick off all download tasks at once.
    # asyncio.run() creates, runs and closes the event loop itself; the
    # get_event_loop()/run_until_complete() pattern is deprecated since
    # Python 3.10.
    asyncio.run(main())
# (Scraped blog footer: "Follow the official WeChat account for more material.")