I've been teaching myself Python for a little over a month and just got to web scraping, so I put together a crawler that pulls epidemic (COVID-19) stats.
PS: the requests library needs to be installed first.
import requests
import json
"""请求头文件和url"""
headers = {'user-agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'}
url = 'https://view.inews.qq.com/g2/getOnsInfo?name=disease_h5&callback=jQuery3410666023933987975_1584781205909&_=1584781205910'
"""解析网页"""
response = requests.get(url,headers = headers)
data = response.text
dic_data = data[40:-1]
#获取请求到的data文件
dic_data = json.loads(dic_data)
dic_data['data'] = json.loads(dic_data['data'])
#讲请求到的json字符串转换为字典
def country():
    """National (China) totals"""
    # chinaTotal is the nationwide slice of the dictionary
    china = dic_data['data']['chinaTotal']
    # print(dic_data['data']['lastUpdateTime'])  # for testing
    """Pull out confirmed, cured and related counts"""
    # The try block guards against missing fields when the data was not fetched correctly
    try:
        confirm = china['confirm']
        heal = china['heal']
        nowConfirm = china['nowConfirm']
        nowSevere = china['nowSevere']
        importedCase = china['importedCase']
        dead = china['dead']
        time = dic_data['data']['lastUpdateTime']
        print('As of %s\nChina\nTotal confirmed: %s\nTotal cured: %s\nTotal deaths: %s\nCurrent confirmed: %s\nCurrent severe: %s\nImported cases: %s' % (time, confirm, heal, dead, nowConfirm, nowSevere, importedCase))
    except Exception:
        print('Something went wrong!')
def provinces():
    """Provinces"""
    back_data = None
    while back_data is None:
        area_name = input('Enter the province to look up: ')
        children = dic_data['data']['areaTree'][0]['children']
        for i in children:
            if i['name'] == area_name:
                back_data = i
                print('Province found')
    """Pull out the province figures"""
    try:
        nowconfirm = back_data['today']['confirm']
        total = back_data['total']
        confirm = total['confirm']
        dead = total['dead']
        deadRate = total['deadRate']
        heal = total['heal']
        healRate = total['healRate']
        print(area_name + '\nNew confirmed: %d\nTotal confirmed: %d\nTotal deaths: %d\nTotal cured: %d\nDeath rate: %s\nCure rate: %s' % (nowconfirm, confirm, dead, heal, deadRate, healRate))
    except Exception:
        print('Invalid input / no data found!')
def start():
    while True:
        search = input('Enter the query type (country (g) / province (s) / 0 to quit): ')
        if search == 'country' or search == 'g':
            country()
        elif search == 'province' or search == 's':
            provinces()
        elif search == '0':
            break
        else:
            print('Invalid input, please try again')

if __name__ == "__main__":
    start()
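
A couple of notes on the script above. The except blocks only wrap the print calls, so a failed request is never caught: if the network or the endpoint is down, requests.get raises before any of that code runs. A minimal guard, reusing the url and headers defined at the top and assuming a 10-second timeout is acceptable:

try:
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()  # turn HTTP 4xx/5xx responses into an exception
except requests.exceptions.RequestException as exc:
    raise SystemExit('Request failed: %s' % exc)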
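
The data[40:-1] slice only works because the callback name in the URL plus the opening parenthesis happens to be exactly 40 characters; if the callback parameter ever changes, the slice silently corrupts the JSON. A slightly more forgiving sketch, assuming the body always has the JSONP shape callback({...}):

text = response.text
start = text.find('(') + 1  # first character after the callback name
end = text.rfind(')')       # closing parenthesis of the JSONP wrapper
dic_data = json.loads(text[start:end])
dic_data['data'] = json.loads(dic_data['data'])  # the inner 'data' field is itself JSON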
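
Finally, if provinces() keeps looping because of a typo, printing the accepted names first can help. This reuses the same areaTree structure the lookup already walks:

children = dic_data['data']['areaTree'][0]['children']
print('Available provinces: ' + ', '.join(child['name'] for child in children))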