import requests
from bs4 import BeautifulSoup

# Scrape the one-day weather page for city code 101230301 from
# weather.com.cn and print the raw text of each section of interest.
try:
    # Fetch the page; the site is UTF-8 but requests may mis-detect the
    # encoding, so set it explicitly before reading .text.
    response = requests.get("http://www.weather.com.cn/weather1d/101230301.shtml")
    response.encoding = "utf-8"

    # Parse the HTML and gather the element groups we care about.
    # A plain list replaces the original content1..content6 variables
    # that were dispatched via eval(f'content{index}').
    bs = BeautifulSoup(response.text, 'html.parser')
    sections = [
        bs.find_all('p', class_='tem'),           # temperature
        bs.find_all('p', class_='wea'),           # weather description
        bs.find_all('p', class_='win'),           # wind
        bs.find_all('p', class_='sun sunUp'),     # sunrise time
        bs.find_all('p', class_='sun sunDown'),   # sunset time
        # Only the second 'clearfix' list is wanted; slicing instead of
        # [content6[1]] avoids an IndexError when fewer than two match.
        bs.find_all('ul', class_='clearfix')[1:2],
    ]

    # Print the text of every matched element, section by section.
    for section in sections:
        for element in section:
            print(element.get_text())
except Exception as e:
    # Report network/parsing failures instead of crashing the script.
    print(e)
import requests
from bs4 import BeautifulSoup

# Scrape the one-day weather page for city code 101230301 from
# weather.com.cn, collect each section's text (newlines stripped) into
# a list, and print one list per section.
try:
    # Fetch the page; force UTF-8 so .text decodes correctly.
    response = requests.get("http://www.weather.com.cn/weather1d/101230301.shtml")
    response.encoding = "utf-8"

    # Parse the HTML and gather the element groups we care about.
    # A plain list replaces the original content1..content6 /
    # content1_..content6_ variables that were dispatched via eval().
    bs = BeautifulSoup(response.text, 'html.parser')
    sections = [
        bs.find_all('p', class_='tem'),           # temperature
        bs.find_all('p', class_='wea'),           # weather description
        bs.find_all('p', class_='win'),           # wind
        bs.find_all('p', class_='sun sunUp'),     # sunrise time
        bs.find_all('p', class_='sun sunDown'),   # sunset time
        # Only the second 'clearfix' list is wanted; slicing instead of
        # [content6[1]] avoids an IndexError when fewer than two match.
        bs.find_all('ul', class_='clearfix')[1:2],
    ]

    # Build and print each section's list of texts; the comprehension
    # replaces the eval-driven append loop of the original.
    for section in sections:
        texts = [element.get_text().replace('\n', '') for element in section]
        print(texts)
except Exception as e:
    # Report network/parsing failures instead of crashing the script.
    print(e)