This is an example that scrapes the Guangdong epidemic data from news.qq.com and writes it to d:\gddd.xlsx.
The code is as follows (if this helps, please click the [Accept] button at the top right of this answer, thank you!):
import requests
import openpyxl
url = "https://api.inews.qq.com/newsqa/v1/query/inner/publish/modules/list?modules=chinaDayList,chinaDayAddList,nowConfirmStatis,provinceCompare"
headers={'Host' : 'api.inews.qq.com',
'Origin' : 'https://news.qq.com',
'Referer' : 'https://news.qq.com/zt2020/page/feiyan.htm',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 UBrowser/6.2.4098.3 Safari/537.36'}
response = requests.post(url=url,headers=headers)
data = response.json()['data']['chinaDayAddList']
wb = openpyxl.Workbook()
ws = wb.active
ws.title = "Guangdong epidemic"
# Column headers matching the fields written below; the last two are the rates reported by the API
ws.append(['Year', 'Date', 'Confirmed', 'New local confirmed', 'Healed', 'Dead', 'Death rate', 'Heal rate'])
# Write one row per day, using the field names from the API response
for each in data:
    ws.append([each['y'], each['date'], each['confirm'], each['localConfirmadd'],
               each['heal'], each['dead'], each['deadRate'], each['healRate']])
wb.save(r"d:\gddd.xlsx")
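
If you want to sanity-check the result, a minimal sketch along these lines (assuming the workbook was saved to the same d:\gddd.xlsx path as above) reads the file back and prints the first few rows:

import openpyxl

# Reopen the workbook written by the script above (path assumed from the example)
wb = openpyxl.load_workbook(r"d:\gddd.xlsx")
ws = wb.active

print(ws.title)                    # sheet name
print(ws.max_row, "rows written")  # header row plus one row per day

# Print the header row and the first three data rows for a quick visual check
for row in ws.iter_rows(min_row=1, max_row=4, values_only=True):
    print(row)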