import requests
from bs4 import BeautifulSoup
def get_city_aqi(city_pinyin):
    """Fetch the AQI indicator readings for one city from pm25.in.

    Args:
        city_pinyin: URL path segment identifying the city, e.g. 'beijing'.

    Returns:
        A list of (caption, value) string tuples, one per indicator,
        taken from at most the first 8 'span1' panels on the page.

    Raises:
        requests.RequestException: on network failure, timeout, or HTTP error.
    """
    url = 'http://pm25.in/' + city_pinyin
    # Original timeout of 300 s (5 minutes) is excessive for one page fetch.
    r = requests.get(url, timeout=30)
    # Fail loudly on HTTP errors rather than silently parsing an error page.
    r.raise_for_status()
    soup = BeautifulSoup(r.text, 'html.parser')
    # Collect all nodes matching the indicator-panel class.
    div_list = soup.find_all('div', {'class': 'span1'})
    city_aqi = []
    # Slicing guards against pages with fewer than 8 panels; the original
    # indexed div_list[0..7] unconditionally and could raise IndexError.
    for div_content in div_list[:8]:
        caption = div_content.find('div', {'class': 'caption'}).text.strip()
        value = div_content.find('div', {'class': 'value'}).text.strip()
        # Record each caption/value pair as one list element.
        city_aqi.append((caption, value))
    return city_aqi
def get_all_cities():
    """Scrape the pm25.in home page for the list of monitored cities.

    Returns:
        A list of (city_name, city_pinyin) string tuples, where the
        pinyin is the link href with its leading '/' stripped.

    Raises:
        requests.RequestException: on network failure, timeout, or HTTP error.
    """
    url = 'http://pm25.in/'
    # Original call had no timeout (could hang forever) and no status
    # check; this makes it consistent with get_city_aqi.
    r = requests.get(url, timeout=30)
    r.raise_for_status()
    soup = BeautifulSoup(r.text, 'html.parser')
    # NOTE(review): the second 'bottom' div is assumed to hold the city
    # links — fragile against a site redesign; verify if scraping breaks.
    city_div = soup.find_all('div', {'class': 'bottom'})[1]
    city_list = []
    for city_link in city_div.find_all('a'):
        city_name = city_link.text
        # Drop the leading '/' so the value can be appended to the base URL.
        city_pinyin = city_link['href'][1:]
        city_list.append((city_name, city_pinyin))
    return city_list
def main():
    """Fetch the city list, then print each city's AQI readings."""
    # Unpack (name, pinyin) pairs directly instead of indexing by position.
    for city_name, city_pinyin in get_all_cities():
        # Look up this city's AQI indicator list and report it.
        city_aqi = get_city_aqi(city_pinyin)
        print(city_name, city_aqi)
# Run the scraper only when executed as a script, not when imported.
if __name__ == '__main__':
    main()