[code]import requests
from bs4 import BeautifulSoup
from pyecharts.charts import Bar

all_data = []


def parse_page(url):  # parse the forecast page at url
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:75.0) Gecko/20100101 Firefox/75.0'}
    response = requests.get(url, headers=headers)
    # print(response.content.decode('utf-8'))  # if the page comes back garbled, response.text guessed the wrong encoding, so decode response.content as UTF-8 instead
    text = response.content.decode('utf-8')
    soup = BeautifulSoup(text, 'lxml')
    conMidtab = soup.find('div', class_='conMidtab')
    tables = conMidtab.find_all('table')
    for table in tables:
        trs = table.find_all('tr')[2:]
        for index, tr in enumerate(trs):
            tds = tr.find_all('td')
            city_td = tds[0]
            if index == 0:  # in the first data row the first cell holds the province, so the city is in the second cell
                city_td = tds[1]
            city = list(city_td.stripped_strings)[0]
            temp_td = tds[-2]
            min_temp = list(temp_td.stripped_strings)[0]
            data = all_data.append({'city': city, 'min_temp': int(min_temp)})
            # print({'city': city, 'min_temp': int(min_temp)})


def main():
    urls = {'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/db.shtml',
            'http://www.weather.com.cn/textFC/hz.shtml',
            'http://www.weather.com.cn/textFC/xb.shtml',
            'http://www.weather.com.cn/textFC/xn.shtml'
            }
    # url = 'http://www.weather.com.cn/textFC/hb.shtml'
    # url = 'http://www.weather.com.cn/textFC/hn.shtml'
    # url = 'http://www.weather.com.cn/textFC/db.shtml'
    # url = 'http://www.weather.com.cn/textFC/hz.shtml'
    # url = 'http://www.weather.com.cn/textFC/xb.shtml'
    # url = 'http://www.weather.com.cn/textFC/xn.shtml'
    # url = 'http://www.weather.com.cn/textFC/gat.shtml'
    for url in urls:
        parse_page(url)
    all_data.sort(key=lambda data: data['min_temp'])
    data = all_data[0:10]
    cities = list(map(lambda x: x['city'], data))
    temps = list(map(lambda x: x['min_temp'], data))
    chart = Bar('中国气温最低温度排行')
    chart.add('', cities, temps)
    chart.render('temperature1.html')


if __name__ == '__main__':
    main()
[/code]
This is the error I get:
Traceback (most recent call last):
File "C:/Users/lenovo/PycharmProjects/untitled6/SCRAPY/DAY5/2222.py", line 58, in <module>
main()
File "C:/Users/lenovo/PycharmProjects/untitled6/SCRAPY/DAY5/2222.py", line 52, in main
chart = Bar('中国气温最低温度排行')
File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\chart.py", line 163, in __init__
super().__init__(init_opts=init_opts)
File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\chart.py", line 14, in __init__
super().__init__(init_opts=init_opts)
File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\base.py", line 28, in __init__
self.width = _opts.get("width", "900px")
AttributeError: 'str' object has no attribute 'get'
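The traceback points at the Bar constructor: in pyecharts 1.x the chart classes no longer accept a title string as the first positional argument; they take an init_opts keyword (an opts.InitOpts object). The string therefore ends up where an options mapping is expected, and calling .get() on it fails. A minimal sketch of the 1.x-style construction, assuming pyecharts 1.x is installed (the city and temperature values are just placeholders):
[code]from pyecharts import options as opts
from pyecharts.charts import Bar

# pyecharts 1.x style: sizing goes into init_opts, the title into set_global_opts
chart = Bar(init_opts=opts.InitOpts(width='900px', height='500px'))
chart.add_xaxis(['城市A', '城市B'])     # placeholder city names
chart.add_yaxis('最低气温', [-5, -8])   # placeholder temperatures
chart.set_global_opts(title_opts=opts.TitleOpts(title='中国气温最低温度排行'))
chart.render('demo.html')
[/code]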
Try this:
[code]import requests
from bs4 import BeautifulSoup
from pyecharts.charts import Bar

all_data = []


def parse_page(url):  # parse the forecast page at url
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:75.0) Gecko/20100101 Firefox/75.0'}
    response = requests.get(url, headers=headers)
    # if the page comes back garbled with response.text, decode response.content as UTF-8 instead
    text = response.content.decode('utf-8')
    soup = BeautifulSoup(text, 'lxml')
    conMidtab = soup.find('div', class_='conMidtab')
    tables = conMidtab.find_all('table')
    for table in tables:
        trs = table.find_all('tr')[2:]
        for index, tr in enumerate(trs):
            tds = tr.find_all('td')
            city_td = tds[0]
            if index == 0:  # in the first data row the first cell holds the province, so the city is in the second cell
                city_td = tds[1]
            city = list(city_td.stripped_strings)[0]
            temp_td = tds[-2]
            min_temp = list(temp_td.stripped_strings)[0]
            all_data.append({'city': city, 'min_temp': int(min_temp)})
            # print({'city': city, 'min_temp': int(min_temp)})


def main():
    urls = {'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/hn.shtml',
            'http://www.weather.com.cn/textFC/db.shtml',
            'http://www.weather.com.cn/textFC/hz.shtml',
            'http://www.weather.com.cn/textFC/xb.shtml',
            'http://www.weather.com.cn/textFC/xn.shtml'
            }
    # url = 'http://www.weather.com.cn/textFC/gat.shtml'
    for url in urls:
        parse_page(url)
    all_data.sort(key=lambda data: data['min_temp'])
    data = all_data[0:10]  # the ten coldest cities
    cities = list(map(lambda x: x['city'], data))
    temps = list(map(lambda x: x['min_temp'], data))
    chart = Bar()
    chart.add_xaxis(cities)
    chart.add_yaxis('中国气温最低温度排行', temps)
    chart.render('temperature1.html')


if __name__ == '__main__':
    main()
[/code]
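Note that in this version the original title string only appears as the series (legend) name passed to add_yaxis. If you want it rendered as an actual chart title, the 1.x API puts it in chart.set_global_opts(title_opts=opts.TitleOpts(title='中国气温最低温度排行')) before chart.render(), as in the sketch above (this requires importing pyecharts.options as opts).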