lzb1001 posted on 2022-4-7 22:49:23

Entering any keyword to scrape Baidu Baike subtitles returns an error

import urllib.request
import urllib.parse
import re
from bs4 import BeautifulSoup

def main():
    keyword = input('请输入检索关键词:')
    param = urllib.parse.urlencode({'word':keyword})
    url = 'https://baike.baidu.com/item/%s' % param

    req = urllib.request.Request(url)
    response = urllib.request.urlopen(req)
    html = response.read().decode('utf-8')
    soup = BeautifulSoup(html, 'html.parser')

    for each in soup.find_all(href=re.compile('item')):
        content = ''.join([each.text])                               # text of the entry link
        url2 = ''.join(['http://baike.baidu.com', each['href']])
        req2 = urllib.request.Request(url2)
        response2 = urllib.request.urlopen(req2)
        html2 = response2.read().decode('utf-8')
        soup2 = BeautifulSoup(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2.text])              # append the subtitle (<h2>) if the entry has one
        content = ''.join([content, '->', url2])
        print(content)

if __name__ == '__main__':
    main()

After running:

请输入检索关键词:猪八戒
Traceback (most recent call last):
File "D:\work\p14_92.py", line 34, in <module>
    main()
File "D:\work\p14_92.py", line 25, in main
    response2 = urllib.request.urlopen(req2)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 222, in urlopen
    return opener.open(url, data, timeout)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 525, in open
    response = self._open(req, data)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 543, in _open
    '_open', req)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 503, in _call_chain
    result = func(*args)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1347, in http_open
    return self.do_open(http.client.HTTPConnection, req)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1319, in do_open
    encode_chunked=req.has_header('Transfer-encoding'))
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1252, in request
    self._send_request(method, url, body, headers, encode_chunked)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1263, in _send_request
    self.putrequest(method, url, **skips)
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1118, in putrequest
    self._output(self._encode_request(request))
File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1198, in _encode_request
    return request.encode('ascii')
UnicodeEncodeError: 'ascii' codec can't encode characters in position 10-14: ordinal not in range(128)
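
The traceback points at the second urlopen call. The first URL is built with urlencode and is already percent-encoded, but the href values scraped out of the page can still contain raw Chinese characters, and urllib has to encode the HTTP request line as ASCII. A minimal sketch of the difference (the href value below is just an example):

import urllib.parse

raw_href = '/item/猪八戒'              # example of an href that is not yet percent-encoded
print(urllib.parse.quote(raw_href))    # /item/%E7%8C%AA%E5%85%AB%E6%88%92 -- ASCII only

# Passing the raw form to urlopen raises the UnicodeEncodeError shown above, because
# the request line must be ASCII; the quoted form can be sent. Note that quote() would
# also re-encode an href that is already encoded ('%' becomes '%25'), which is why the
# reply below only quotes hrefs that do not yet contain '%'.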

isdkz posted on 2022-4-8 08:25:59

This post was last edited by isdkz on 2022-4-8 08:41

import urllib.request
import urllib.parse
import re
from bs4 import BeautifulSoup

def main():
    keyword = input('请输入检索关键词:')
    param = urllib.parse.urlencode({'word':keyword})
    url = 'https://baike.baidu.com/item/%s' % param

    req = urllib.request.Request(url)
    response = urllib.request.urlopen(req)
    html = response.read().decode('utf-8')
    soup = BeautifulSoup(html, 'html.parser')

    for each in soup.find_all(href=re.compile('item')):
        content = ''.join([each.text])
        url2 = ''.join(['http://baike.baidu.com',
                        temp if "%" in (temp := each['href']) else urllib.parse.quote(temp)])  # check whether the href is already URL-encoded; if not, encode it
        req2 = urllib.request.Request(url2)
        response2 = urllib.request.urlopen(req2)
        html2 = response2.read().decode('utf-8')
        soup2 = BeautifulSoup(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2.text])
        content = ''.join([content, '->', url2])
        print(content)

if __name__ == '__main__':
    main()

lzb1001 posted on 2022-4-8 09:59:47

isdkz posted on 2022-4-8 08:25


url2 = ''.join(['http://baike.baidu.com',
            temp if "%" in (temp:=each['href']) else urllib.parse.quote(temp)])  # check whether the href is already URL-encoded; if not, encode it

I just tested it and this line of code won't run. Is there something wrong somewhere?

lzb1001 posted on 2022-4-8 12:14:23

isdkz posted on 2022-4-8 08:25


url2 = ''.join(['http://baike.baidu.com',
            temp if "%" in (temp:=each['href']) else urllib.parse.quote(temp)])  # check whether the href is already URL-encoded; if not, encode it

It reports a syntax error when run.

lzb1001 posted on 2022-4-8 16:14:11

The code above throws an error when run.

lzb1001 posted on 2022-4-8 16:14:53

isdkz posted on 2022-4-8 08:25


url2 = ''.join(['……',
            temp if "%" in (temp:=each['href']) else urllib.parse.quote(temp)])  # check whether the href is already URL-encoded; if not, encode it

This line of code seems to have a syntax error.

isdkz posted on 2022-4-8 16:18:28

lzb1001 posted on 2022-4-8 16:14
url2 = ''.join(['……',
            temp if "%" in (temp:=each['href']) else urllib.parse.quote(t ...

This is syntax that was only introduced in Python 3.8; you cannot use := before Python 3.8.
import urllib.request
import urllib.parse
import re
from bs4 import BeautifulSoup

def main():
    keyword = input('请输入检索关键词:')
    param = urllib.parse.urlencode({'word':keyword})
    url = 'https://baike.baidu.com/item/%s' % param

    req = urllib.request.Request(url)
    response = urllib.request.urlopen(req)
    html = response.read().decode('utf-8')
    soup = BeautifulSoup(html, 'html.parser')

    for each in soup.find_all(href=re.compile('item')):
        content = ''.join([each.text])
        url2 = ''.join(['http://baike.baidu.com',
                        each['href'] if "%" in each['href'] else urllib.parse.quote(each['href'])])  # check whether the href is already URL-encoded; if not, encode it
        req2 = urllib.request.Request(url2)
        response2 = urllib.request.urlopen(req2)
        html2 = response2.read().decode('utf-8')
        soup2 = BeautifulSoup(html2, 'html.parser')
        if soup2.h2:
            content = ''.join([content, soup2.h2.text])
        content = ''.join([content, '->', url2])
        print(content)

if __name__ == '__main__':
    main()

lzb1001 posted on 2022-4-8 22:53:32

1. After testing, both of the following URLs seem to work:
(1)url = '……/item/%s' % param
(2)url = '……/search/word?%s' % param
Why? In general, what should I look at to decide which one to use?

2. About for each in soup.find_all(href = re.compile('item')):
I wanted to remove the first 5 and the last 1 irrelevant results from the output, so I added a list slice at the end of that code, but the first 5 irrelevant results are still shown:

秒懂本尊答目录->……/item/%E7%A7%92%E6%87%82%E6%9C%AC%E5%B0%8A%E7%AD%94
秒懂大师说目录->……/item/%E7%A7%92%E6%87%82%E5%A4%A7%E5%B8%88%E8%AF%B4
秒懂看瓦特目录->……/item/%E7%A7%92%E6%87%82%E7%9C%8B%E7%93%A6%E7%89%B9
秒懂五千年目录->……/item/%E7%A7%92%E6%87%82%E4%BA%94%E5%8D%83%E5%B9%B4
秒懂全视界『寻找世界杯-俄罗斯』特辑->……/item/%E7%A7%92%E6%87%82%E5%85%A8%E8%A7%86%E7%95%8C
……

In other words, with the list slice added, only the following single irrelevant result at the end was actually filtered out:
本人编辑目录->http://baike.baidu.com/item/%E7%99%BE%E5%BA%A6%E7%99%BE%E7%A7%91%EF%BC%9A%E6%9C%AC%E4%BA%BA%E8%AF%8D%E6%9D%A1%E7%BC%96%E8%BE%91%E6%9C%8D%E5%8A%A1/22442459?bk_fr=pcFooter


So it looks like the list slice only partly works, but if:

(1) I remove the list slice, the program fails to run and returns the following error:

请输入检索关键词:猪八戒
Traceback (most recent call last):
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1319, in do_open
    encode_chunked=req.has_header('Transfer-encoding'))
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1252, in request
    self._send_request(method, url, body, headers, encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1298, in _send_request
    self.endheaders(body, encode_chunked=encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1247, in endheaders
    self._send_output(message_body, encode_chunked=encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1026, in _send_output
    self.send(msg)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 966, in send
    self.connect()
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 938, in connect
    (self.host,self.port), self.timeout, self.source_address)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\socket.py", line 707, in create_connection
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\socket.py", line 752, in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: getaddrinfo failed

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:/Users/dell/Desktop/123.py", line 30, in <module>
    main()
  File "C:/Users/dell/Desktop/123.py", line 21, in main
    response2 = urllib.request.urlopen(req2)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 222, in urlopen
    return opener.open(url, data, timeout)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 525, in open
    response = self._open(req, data)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 543, in _open
    '_open', req)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 503, in _call_chain
    result = func(*args)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1347, in http_open
    return self.do_open(http.client.HTTPConnection, req)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1321, in do_open
    raise URLError(err)
urllib.error.URLError: <urlopen error getaddrinfo failed>

(2) I change the list slice to [:-1], the following error is returned:
请输入检索关键词:猪八戒
Traceback (most recent call last):
File "D:\work\p14_92.py", line 35, in <module>
    main()
File "D:\work\p14_92.py", line 24, in main
    url2 = ''.join(['http://baike.baidu.com', each['href'] if "%" in each['href'] else urllib.parse.quote(each['href'])]) # check whether the href is already URL-encoded; if not, encode it
TypeError: string indices must be integers

3. If I want to add a spoofed header, how should I add it? (A sketch follows this post.)

4. The subtitle for 猪八戒 (one of the main characters in the classic Chinese novel Journey to the West) doesn't seem to appear in the output.
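
On question 3 above: a minimal sketch of one common way to attach a browser-style User-Agent when using urllib; the User-Agent string and the URL here are only placeholders:

import urllib.request

headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 '
                         '(KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}

# Request accepts a headers dict, and every header in it is sent with the request;
# calling req.add_header('User-Agent', '...') after construction works as well.
req = urllib.request.Request('https://baike.baidu.com/item/%E7%8C%AA%E5%85%AB%E6%88%92',
                             headers=headers)
response = urllib.request.urlopen(req)
print(response.getcode())

On question 1, one way to see what each URL form actually does is to print response.geturl() after the request: it reports the final URL after urlopen has followed any redirects.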

lzb1001 posted on 2022-4-9 14:19:54

This post was last edited by lzb1001 on 2022-4-9 14:26

isdkz posted on 2022-4-8 16:18
This is syntax that was only introduced in Python 3.8; you cannot use := before Python 3.8.

1. The two URLs below both seem to work. Why is that, and what should I mainly look at to decide which one to use?
url = '……/search/word?%s' % param
url = '……/item/%s' % param

2. With the list slice in place, only the last useless record is filtered out and the first few cannot be filtered; if I leave the slice out, or use a value smaller than 5, e.g. just [:-1], it errors out and won't run. Why is that? (See the sketch at the end of this post.)

3. If I want to add a spoofed header, is the headers usage shown here correct?
headers={'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}

4. With the keyword "黄埔军校", scraping works normally at first, but near the end it reports the following error, and I don't know why.
Traceback (most recent call last):
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1319, in do_open
    encode_chunked=req.has_header('Transfer-encoding'))
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1252, in request
    self._send_request(method, url, body, headers, encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1298, in _send_request
    self.endheaders(body, encode_chunked=encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1247, in endheaders
    self._send_output(message_body, encode_chunked=encode_chunked)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 1026, in _send_output
    self.send(msg)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 966, in send
    self.connect()
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\http\client.py", line 938, in connect
    (self.host,self.port), self.timeout, self.source_address)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\socket.py", line 707, in create_connection
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\socket.py", line 752, in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: getaddrinfo failed

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "D:\work\p14_92.py", line 35, in <module>
    main()
  File "D:\work\p14_92.py", line 26, in main
    response2 = urllib.request.urlopen(req2)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 222, in urlopen
    return opener.open(url, data, timeout)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 525, in open
    response = self._open(req, data)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 543, in _open
    '_open', req)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 503, in _call_chain
    result = func(*args)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1347, in http_open
    return self.do_open(http.client.HTTPConnection, req)
  File "C:\Users\dell\AppData\Local\Programs\Python\Python37\lib\urllib\request.py", line 1321, in do_open
    raise URLError(err)
urllib.error.URLError: <urlopen error getaddrinfo failed>
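
On questions 2 and 4 above: slicing the list returned by find_all only trims a fixed number of items off either end, so it breaks as soon as the number of unrelated links changes, and it does nothing about hrefs that are not ordinary entry links. A hypothetical alternative (not the book's solution) is to filter on the link text and the href themselves; the criteria below are guesses based on the output posted earlier, and in the real loop text and href would be each.text and each['href']:

# hypothetical (link text, href) pairs like the ones the loop sees
links = [
    ('秒懂本尊答', '/item/%E7%A7%92%E6%87%82%E6%9C%AC%E5%B0%8A%E7%AD%94'),  # video entry at the top
    ('猪八戒', '/item/%E7%8C%AA%E5%85%AB%E6%88%92'),                        # entry we actually want
    ('本人编辑', '/item/xxx/22442459?bk_fr=pcFooter'),                      # footer link at the bottom
]

for text, href in links:
    # skip the "秒懂" video entries and the pcFooter link instead of slicing by position
    if text.startswith('秒懂') or 'bk_fr' in href:
        continue
    print(text, '->', 'https://baike.baidu.com' + href)

As for the getaddrinfo failure in question 4: that error is raised when a hostname cannot be resolved, which usually means a momentary network/DNS problem or a url2 whose host part got mangled, so printing url2 right before the request should show which link triggers it.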