鱼C论坛


[Solved] Why does this code throw an error? Could someone try it and point me in the right direction?

Posted on 2020-4-14 11:41:42

[code]import requests
from bs4 import BeautifulSoup
from pyecharts.charts import Bar

all_data = []


def parse_page(url):  # parse one regional weather page
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:75.0) Gecko/20100101 Firefox/75.0'}
    response = requests.get(url, headers=headers)
    # print(response.content.decode('utf-8'))  # if the decoded page is garbled, response.text guessed the wrong encoding, so decode response.content as utf-8 instead
    text = response.content.decode('utf-8')
    soup = BeautifulSoup(text, 'lxml')
    conMidtab = soup.find('div', class_='conMidtab')
    tables = conMidtab.find_all('table')
    for table in tables:
        trs = table.find_all('tr')[2:]
        for index, tr in enumerate(trs):
            tds = tr.find_all('td')
            city_td = tds[0]
            if index == 0:
                city_td = tds[1]
            city = list(city_td.stripped_strings)[0]
            temp_td = tds[-2]
            min_temp = list(temp_td.stripped_strings)[0]
            data = all_data.append({'city': city, 'min_temp': int(min_temp)})
            # print({'city':city,'min_temp':int(min_temp)})


def main():
    urls = {'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/db.shtml',
            'http://www.weather.com.cn/textFC/hz.shtml',
            'http://www.weather.com.cn/textFC/xb.shtml',
            'http://www.weather.com.cn/textFC/xn.shtml'
            }
    # url = 'http://www.weather.com.cn/textFC/hb.shtml'
    # url = 'http://www.weather.com.cn/textFC/hn.shtml'
    # url = 'http://www.weather.com.cn/textFC/db.shtml'
    # url = 'http://www.weather.com.cn/textFC/hz.shtml'
    # url = 'http://www.weather.com.cn/textFC/xb.shtml'
    # url = 'http://www.weather.com.cn/textFC/xn.shtml'
    # url = 'http://www.weather.com.cn/textFC/gat.shtml'
    for url in urls:
        parse_page(url)

    all_data.sort(key=lambda data: data['min_temp'])
    data = all_data[0:10]
    cities = list(map(lambda x: x['city'], data))
    temps = list(map(lambda x: x['min_temp'], data))
    chart = Bar('中国气温最低温度排行')
    chart.add('', cities, temps)
    chart.render('temperature1.html')


if __name__ == '__main__':
    main()
[/code]



Here is the traceback:
Traceback (most recent call last):
  File "C:/Users/lenovo/PycharmProjects/untitled6/SCRAPY/DAY5/2222.py", line 58, in <module>
    main()
  File "C:/Users/lenovo/PycharmProjects/untitled6/SCRAPY/DAY5/2222.py", line 52, in main
    chart = Bar('中国气温最低温度排行')
  File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\chart.py", line 163, in __init__
    super().__init__(init_opts=init_opts)
  File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\chart.py", line 14, in __init__
    super().__init__(init_opts=init_opts)
  File "C:\Users\lenovo\AppData\Local\Programs\Python\Python38-32\lib\site-packages\pyecharts\charts\base.py", line 28, in __init__
    self.width = _opts.get("width", "900px")
AttributeError: 'str' object has no attribute 'get'
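
The traceback itself points at the cause: in pyecharts 1.x the first argument of Bar() is init_opts, and base.py ends up calling .get('width', ...) on whatever was passed, so it has to be an opts.InitOpts object, not the title string the old 0.x API accepted. A minimal sketch of the 1.x-style constructor, assuming pyecharts >= 1.0 (which the init_opts keyword in the traceback indicates):

[code]from pyecharts.charts import Bar
from pyecharts import options as opts

# pyecharts 1.x: the constructor takes init_opts (an InitOpts object), not a title string,
# so Bar('中国气温最低温度排行') hands base.py a str and .get() raises AttributeError
chart = Bar(init_opts=opts.InitOpts(width='900px', height='500px'))
[/code]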
Posted on 2020-4-14 12:57:46 | Best answer
Try it like this:
[code]import requests
from bs4 import BeautifulSoup
from pyecharts.charts import Bar

all_data = []


def parse_page(url):  # parse one regional weather page
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:75.0) Gecko/20100101 Firefox/75.0'}
    response = requests.get(url, headers=headers)
    # print(response.content.decode('utf-8'))  # if the decoded page is garbled, response.text guessed the wrong encoding, so decode response.content as utf-8 instead
    text = response.content.decode('utf-8')
    soup = BeautifulSoup(text, 'lxml')
    conMidtab = soup.find('div', class_='conMidtab')
    tables = conMidtab.find_all('table')
    for table in tables:
        trs = table.find_all('tr')[2:]
        for index, tr in enumerate(trs):
            tds = tr.find_all('td')
            city_td = tds[0]
            if index == 0:
                city_td = tds[1]
            city = list(city_td.stripped_strings)[0]
            temp_td = tds[-2]
            min_temp = list(temp_td.stripped_strings)[0]
            all_data.append({'city': city, 'min_temp': int(min_temp)})  # list.append returns None, so there is nothing useful to assign
            # print({'city':city,'min_temp':int(min_temp)})


def main():
    urls = {'http://www.weather.com.cn/textFC/hb.shtml',
            'http://www.weather.com.cn/textFC/hb.shtml',  # note: 'hb.shtml' appears twice; the commented list below suggests 'hn.shtml' was probably intended
            'http://www.weather.com.cn/textFC/db.shtml',
            'http://www.weather.com.cn/textFC/hz.shtml',
            'http://www.weather.com.cn/textFC/xb.shtml',
            'http://www.weather.com.cn/textFC/xn.shtml'
            }
    # url = 'http://www.weather.com.cn/textFC/hb.shtml'
    # url = 'http://www.weather.com.cn/textFC/hn.shtml'
    # url = 'http://www.weather.com.cn/textFC/db.shtml'
    # url = 'http://www.weather.com.cn/textFC/hz.shtml'
    # url = 'http://www.weather.com.cn/textFC/xb.shtml'
    # url = 'http://www.weather.com.cn/textFC/xn.shtml'
    # url = 'http://www.weather.com.cn/textFC/gat.shtml'
    for url in urls:
        parse_page(url)

    all_data.sort(key=lambda data: data['min_temp'])
    data = all_data[0:10]
    cities = list(map(lambda x: x['city'], data))
    temps = list(map(lambda x: x['min_temp'], data))
    chart = Bar()
    chart.add_xaxis(cities)
    chart.add_yaxis('中国气温最低温度排行', temps)
    chart.render('temperature1.html')


if __name__ == '__main__':
    main()
[/code]
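
One thing the fix above changes: the old title text now travels as the series name passed to add_yaxis. If a proper chart title is wanted instead, pyecharts 1.x sets it through set_global_opts. A small self-contained sketch with placeholder data (the city names and temperatures below are made up for illustration, standing in for the scraped top-10 list):

[code]from pyecharts.charts import Bar
from pyecharts import options as opts

cities = ['漠河', '根河']   # placeholder data for illustration
temps = [-30, -28]

chart = Bar()
chart.add_xaxis(cities)
chart.add_yaxis('最低气温', temps)  # short series name
# in pyecharts 1.x the chart title goes into the global options
chart.set_global_opts(title_opts=opts.TitleOpts(title='中国气温最低温度排行'))
chart.render('temperature1.html')
[/code]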
Thread starter | Posted on 2020-4-14 20:21:45
I was following an older video, so the Bar API must have changed between that version and the current one, right?
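
The pyecharts API did change between major versions: 0.5.x, which older tutorials use, took the title in the constructor and a single add() call with the series name, x values and y values, while 1.x splits this into add_xaxis/add_yaxis plus option objects, which is exactly why Bar('中国气温最低温度排行') now fails. A rough sketch of the old call pattern, assuming pyecharts < 1.0 is installed (under 1.x this is precisely what reproduces the AttributeError above):

[code]# pyecharts 0.5.x style, as shown in older tutorials (needs pyecharts < 1.0 to run)
from pyecharts import Bar

cities = ['漠河', '根河']   # placeholder data for illustration
temps = [-30, -28]

chart = Bar('中国气温最低温度排行')   # 0.5.x: title passed to the constructor
chart.add('', cities, temps)           # 0.5.x: one add() call takes series name, x and y
chart.render('temperature1.html')
[/code]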