在爬取百度小说中的西游记时,最后的文章内容存储在“content”中,但是运行代码就显示 “ KeyError: 'content' ” ,下面是我的代码
```python
# https://dushu.baidu.com/api/pc/getCatalog?data={"book_id":"4306063500"}
# 所有章节的内容(名称,cid)
# https://dushu.baidu.com/api/pc/getChapterContent?data={"book_id":"4306063500","cid":"4306063500|1569782244","need_bookinfo":1}
# 具体内容
import requests
import asyncio
import aiohttp
import json
import aiofiles
async def aiodownload(cid, b_id, title):
    """Download one chapter's text and write it to a file named after the chapter title.

    Args:
        cid:   chapter id taken from the catalog listing.
        b_id:  book id of the novel.
        title: chapter title, used directly as the output filename.
    """
    payload = {
        "book_id": b_id,
        "cid": f"{b_id}|{cid}",
        "need_bookinfo": 1
    }
    payload_json = json.dumps(payload)
    # BUG FIX: the chapter text is served by the getChapterContent endpoint
    # (see the sample URL in the comment at the top of the script).
    # The original code requested getCatalog here, whose response only
    # contains the chapter list and has no 'content' key — that is what
    # raised KeyError: 'content'.
    url = f"https://dushu.baidu.com/api/pc/getChapterContent?data={payload_json}"
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            dic = await resp.json()
            async with aiofiles.open(title, "w", encoding="utf-8") as f:
                await f.write(dic['data']['novel']['content'])  # write the chapter text
async def getCatalog(url):
    """Fetch the book's chapter catalog and download every chapter concurrently.

    NOTE(review): this reads the module-level global ``b_id`` rather than a
    parameter — confirm it is always defined before this coroutine runs.
    """
    # Blocking fetch of the catalog; each item describes one chapter.
    chapters = requests.get(url).json()['data']['novel']['items']
    # One download coroutine per chapter; (cid, title) identify the chapter.
    downloads = [
        aiodownload(chapter['cid'], b_id, chapter['title'])
        for chapter in chapters
    ]
    await asyncio.gather(*downloads)
if __name__ == '__main__':
    # Book id for this novel; getCatalog reads ``b_id`` as a module-level global,
    # so the name must stay exactly ``b_id``.
    b_id = "4306063500"
    # The catalog endpoint expects its parameters as a JSON string in the query.
    query = json.dumps({"book_id": b_id})
    catalog_url = f'https://dushu.baidu.com/api/pc/getCatalog?data={query}'
    asyncio.run(getCatalog(catalog_url))
```