小甲鱼 Lecture 55
import urllib.request as ur
import urllib.parse as upa
from bs4 import BeautifulSoup as bso
import re
def main():
    keyword = input('Enter a keyword: ')
    keyword2 = upa.quote(keyword)
    url = 'http://baike.baidu.com/item/%s' % keyword2
    http1 = ur.Request(url)
    http1.addheaders = [('user-Agent', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
    html1 = ur.urlopen(http1).read().decode('utf-8')
    soup = bso(html1, 'html.parser')
    for i in soup.find_all(href=re.compile('item')):
        content = ''.join([i.text])
        url2 = ''.join(['http://baike.baidu.com', i['href']])
        http2 = ur.Request(url2)
        http2.addheaders = [('user-Agent', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
        html2 = ur.urlopen(http2).read().decode('utf-8')
        soup2 = bso(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2])
        content = ''.join([content, ' -> ', url2])
        print(content)

if __name__ == '__main__':
    main()
# Run:
Enter a keyword: 猪八戒
Traceback (most recent call last):
  File "E:\55讲.py", line 35, in <module>
    main()
  File "E:\55讲.py", line 27, in main
    html2 = ur.urlopen(http2).read().decode('utf-8')
  File "D:\lib\urllib\request.py", line 223, in urlopen
    return opener.open(url, data, timeout)
  File "D:\lib\urllib\request.py", line 526, in open
    response = self._open(req, data)
  File "D:\lib\urllib\request.py", line 544, in _open
    '_open', req)
  File "D:\lib\urllib\request.py", line 504, in _call_chain
    result = func(*args)
  File "D:\lib\urllib\request.py", line 1346, in http_open
    return self.do_open(http.client.HTTPConnection, req)
  File "D:\lib\urllib\request.py", line 1318, in do_open
    encode_chunked=req.has_header('Transfer-encoding'))
  File "D:\lib\http\client.py", line 1239, in request
    self._send_request(method, url, body, headers, encode_chunked)
  File "D:\lib\http\client.py", line 1250, in _send_request
    self.putrequest(method, url, **skips)
  File "D:\lib\http\client.py", line 1117, in putrequest
    self._output(request.encode('ascii'))
UnicodeEncodeError: 'ascii' codec can't encode characters in position 10-14: ordinal not in range(128)
How should I fix this?
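The error comes from http.client, which encodes the HTTP request line as ASCII, so any raw Chinese characters left in the path (here, the href scraped from the page) make request.encode('ascii') fail. A minimal sketch of the failure point and the percent-encoding fix; the href value is just an example:

import urllib.parse as upa

href = '/item/猪八戒'   # a non-ASCII href, as scraped from the page
safe = upa.quote(href)  # quote() percent-encodes it; '/' is kept safe by default
print(safe)             # /item/%E7%8C%AA%E5%85%AB%E6%88%92

Prepending 'http://baike.baidu.com' to the quoted path gives a URL that urlopen can actually send.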
import urllib.request as ur
import urllib.parse as upa
from bs4 import BeautifulSoup as bso
import re
def main():
    keyword = input('Enter a keyword: ')
    keyword2 = upa.quote(keyword)
    url = 'http://baike.baidu.com/item/%s' % keyword2
    http1 = ur.Request(url)
    http1.addheaders = [('user-Agent',
                         'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
    html1 = ur.urlopen(http1).read().decode('utf-8')
    soup = bso(html1, 'html.parser')
    for i in soup.find_all(href=re.compile('item')):
        content = i.text  # Small tidy-up: use .text directly, no join needed.
        url2 = 'http://baike.baidu.com' + upa.quote(i['href'])  # You forgot to URL-encode the href.
        http2 = ur.Request(url2)
        http2.addheaders = [('user-Agent',
                             'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
        html2 = ur.urlopen(http2).read().decode('utf-8')
        soup2 = bso(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2.string])  # h2's string, not h2 itself.
        content = ''.join([content, ' -> ', url2])
        print(content)

if __name__ == '__main__':
    main()
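One more thing worth flagging in both versions, though the thread never trips over it: assigning to http1.addheaders does nothing on a Request object (addheaders is an attribute of OpenerDirector, which urlopen never reads off the Request), so the custom User-Agent above is never actually sent and urllib falls back to its default Python-urllib agent. A small sketch of two ways that do attach it:

import urllib.request as ur

ua = ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')

# Option 1: pass the header when constructing the Request.
req = ur.Request('http://baike.baidu.com/item/%E7%8C%AA%E5%85%AB%E6%88%92',
                 headers={'User-Agent': ua})

# Option 2: add it to an existing Request.
req.add_header('User-Agent', ua)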
suchocolate posted on 2020-12-2 10:21
Got it, thanks!

suchocolate posted on 2020-12-2 10:21
I ran it, and it still errors out:
Traceback (most recent call last):
  File "D:\lib\urllib\request.py", line 1318, in do_open
    encode_chunked=req.has_header('Transfer-encoding'))
  File "D:\lib\http\client.py", line 1239, in request
    self._send_request(method, url, body, headers, encode_chunked)
  File "D:\lib\http\client.py", line 1285, in _send_request
    self.endheaders(body, encode_chunked=encode_chunked)
  File "D:\lib\http\client.py", line 1234, in endheaders
    self._send_output(message_body, encode_chunked=encode_chunked)
  File "D:\lib\http\client.py", line 1026, in _send_output
    self.send(msg)
  File "D:\lib\http\client.py", line 964, in send
    self.connect()
  File "D:\lib\http\client.py", line 936, in connect
    (self.host,self.port), self.timeout, self.source_address)
  File "D:\lib\socket.py", line 704, in create_connection
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
  File "D:\lib\socket.py", line 745, in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: getaddrinfo failed

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "E:\55讲.py", line 35, in <module>
    main()
  File "E:\55讲.py", line 27, in main
    html2 = ur.urlopen(http2).read().decode('utf-8')
  File "D:\lib\urllib\request.py", line 223, in urlopen
    return opener.open(url, data, timeout)
  File "D:\lib\urllib\request.py", line 526, in open
    response = self._open(req, data)
  File "D:\lib\urllib\request.py", line 544, in _open
    '_open', req)
  File "D:\lib\urllib\request.py", line 504, in _call_chain
    result = func(*args)
  File "D:\lib\urllib\request.py", line 1346, in http_open
    return self.do_open(http.client.HTTPConnection, req)
  File "D:\lib\urllib\request.py", line 1320, in do_open
    raise URLError(err)
urllib.error.URLError: <urlopen error getaddrinfo failed>

oneface posted on 2020-12-2 15:23
I ran it, and it still errors out:
Traceback (most recent call last):
File "D:\lib\urllib\request.py", lin ...
It fails on the second-to-last link every time.
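For context on that failure: socket.gaierror: getaddrinfo failed means the host name could not be resolved. One common way to hit it with this loop is a matched href that is not a plain /item/... path (for example an absolute URL), so that pasting 'http://baike.baidu.com' in front of it manufactures a bogus host name. A hedged sketch of a guard, assuming the same href shapes as in the code above:

import urllib.parse as upa

BASE = 'http://baike.baidu.com'

def build_url(href):
    # Skip anything that is not a plain /item/... path; gluing BASE onto
    # an absolute URL is what produces the unresolvable host name.
    if not href.startswith('/item/'):
        return None
    # urljoin resolves the percent-encoded path against BASE.
    return upa.urljoin(BASE, upa.quote(href))

print(build_url('/item/猪八戒'))       # http://baike.baidu.com/item/%E7%8C%AA%E5%85%AB%E6%88%92
print(build_url('http://example.com')) # None -- would otherwise corrupt the host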
oneface posted on 2020-12-2 16:06
It fails on the second-to-last link every time.
No error on my end.
Post the code you're running now, along with the keyword you entered.

import urllib.request as ur
import urllib.parse as upa
from bs4 import BeautifulSoup as bso
import re
import time
def main():
    keyword = input('Enter a keyword: ')
    keyword2 = upa.quote(keyword)
    url = 'http://baike.baidu.com/item/%s' % keyword2
    http1 = ur.Request(url)
    http1.addheaders = [('user-Agent', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
    html1 = ur.urlopen(http1).read().decode('utf-8')
    soup = bso(html1, 'html.parser')
    for i in soup.find_all(href=re.compile('item')):
        content = i.text
        url2 = ''.join(['http://baike.baidu.com', upa.quote(i['href'])])
        http2 = ur.Request(url2)
        http2.addheaders = [('user-Agent', 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')]
        html2 = ur.urlopen(http2).read().decode('utf-8')
        soup2 = bso(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2.string])
        content = ''.join([content, ' -> ', url2])
        print(content)

if __name__ == '__main__':
    main()
suchocolate posted on 2020-12-2 16:58
No error on my end.
Post the code you're running now, along with the keyword you entered.
Keyword: 猪八戒
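Pulling the thread's fixes together, here is one consolidated sketch: the same flow as the code above, with the href percent-encoded, the User-Agent actually attached via the headers argument, and odd hrefs skipped. It is untested against the current baike.baidu.com markup, so treat it as a starting point rather than a definitive version:

import urllib.request as ur
import urllib.parse as upa
from bs4 import BeautifulSoup as bso
import re

UA = ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36')
BASE = 'http://baike.baidu.com'

def fetch(url):
    # Pass the User-Agent in headers so it is actually sent with the request.
    req = ur.Request(url, headers={'User-Agent': UA})
    return ur.urlopen(req).read().decode('utf-8')

def main():
    keyword = input('Enter a keyword: ')
    html1 = fetch('%s/item/%s' % (BASE, upa.quote(keyword)))
    soup = bso(html1, 'html.parser')
    for i in soup.find_all(href=re.compile('item')):
        href = i['href']
        if not href.startswith('/item/'):
            continue  # skip absolute or otherwise odd links (see the gaierror above)
        url2 = upa.urljoin(BASE, upa.quote(href))
        soup2 = bso(fetch(url2), 'html.parser')
        content = i.text
        if soup2.h2:
            content = ''.join([content, soup2.h2.string or ''])
        print(''.join([content, ' -> ', url2]))

if __name__ == '__main__':
    main()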