会计也学 posted on 2022-1-9 17:20:39

Basic crawler: scraping 许嵩 images

Can anyone show me how to scrape pictures of 许嵩 from Baidu? Here is my attempt (the code below actually targets Bing Images):
import urllib.request
import re
from bs4 import BeautifulSoup
def main():
    url="https://cn.bing.com/images/search?q=%E8%AE%B8%E5%B5%A9&form=HDRSC2&first=1&tsc=ImageBasicHover"
    datalist =getData(url)
find_picture=re.compile(r'href="/image/search?(.*?)""alt="许嵩 的图像结果"/>"')
def askurl(url):
    head = {"User-Agent":" Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.55 Safari/537.36 Edg/96.0.1054.43"}
    request = urllib.request.Request(url,headers=head)
    html=""
    try:
      response = urllib.request.urlopen(request)
      html=response.read().decode("utf-8")
    except urllib.error.URLError as e:
      if hasattr(e,"code"):
            print(e.code)
      if hasattr(e,"reason"):
            print(e.reason)
    return html
def getData(url):
    html=askurl(url)
    soup = BeautifulSoup(html, "html.parser")
    html=str(html)
    picture= re.findall(find_picture,html)
    print(picture)
if __name__=="__main__":
    main()
My code seems to have quite a few problems, especially the regular expression; I just can't manage to write one that works. The program runs without errors, but it never seems to actually execute the print(picture) step.

大马强 posted on 2022-1-10 09:14:44

Your regex isn't written correctly, so of course you get no results.
import urllib.request
import re
from bs4 import BeautifulSoup


def main():
    url = "https://cn.bing.com/images/search?q=%E8%AE%B8%E5%B5%A9&form=HDRSC2&first=1&tsc=ImageBasicHover"
    datalist = getData(url)

# The page source contains two image tag formats:
# <img class="mimg" style="background-color:#925839;color:#925839" height="182" width="182" src="https://tse4-mm.cn.bing.net/th/id/OIP-C.yTXj-rc8THlpAagM9o58TAHaHa?w=182&amp;h=182&amp;c=7&amp;r=0&amp;o=5&amp;dpr=1.25&amp;pid=1.7" alt="许嵩 的图像结果"/>
# <img class="mimg vimgld" style="background-color:#3c1d19;color:#3c1d19" height="188" width="134" data-src="https://tse1-mm.cn.bing.net/th/id/OIP-C.dKR_c0_6PDuIM_ewjq7vzQHaKY?w=134&amp;h=188&amp;c=7&amp;r=0&amp;o=5&amp;dpr=1.25&amp;pid=1.7" alt="许嵩 的图像结果"/>


find_picture = re.compile(
    r'<img class="mimg.*?".*?src=(".*?") alt="许嵩 的图像结果"')


def askurl(url):
    # request the page with a browser User-Agent so Bing serves the full markup
    head = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.55 Safari/537.36 Edg/96.0.1054.43"}
    request = urllib.request.Request(url, headers=head)
    html = ""
    try:
        response = urllib.request.urlopen(request)
        html = response.read().decode("utf-8")
    except urllib.error.URLError as e:
        # report the HTTP status or the underlying reason if the request fails
        if hasattr(e, "code"):
            print(e.code)
        if hasattr(e, "reason"):
            print(e.reason)
    # print(html)
    return html


def getData(url):
    html = askurl(url)
    soup = BeautifulSoup(html, "html.parser")  # parsed but unused; the extraction below relies on the regex
    picture = re.findall(find_picture, html)
    print(picture, len(picture))


if __name__ == "__main__":
    main()
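
The URLs that the regex captures still carry their surrounding double quotes and HTML-escaped ampersands (&amp; instead of &), so they need a little cleanup before you can download anything. Below is a minimal sketch of a saving step, assuming it receives the list built in getData; the function name, folder name, and file naming are made up for illustration, and Bing may still expect the same User-Agent header as above:

from html import unescape
import os
import urllib.request
import urllib.error


def save_pictures(picture, folder="xusong_pics"):
    # "picture" is the list from re.findall above: every entry is still
    # wrapped in double quotes and uses &amp; instead of &
    os.makedirs(folder, exist_ok=True)
    head = {"User-Agent": "Mozilla/5.0"}
    for i, raw in enumerate(picture):
        link = unescape(raw.strip('"'))  # drop the quotes, decode &amp;
        try:
            request = urllib.request.Request(link, headers=head)
            with urllib.request.urlopen(request) as response:
                with open(os.path.join(folder, f"{i}.jpg"), "wb") as f:
                    f.write(response.read())
        except urllib.error.URLError as e:
            print(link, e)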

大马强 posted on 2022-1-10 09:23:03

['"https://tse4-mm.cn.bing.net/th/id/OIP-C.yTXj-rc8THlpAagM9o58TAHaHa?w=145&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse1-mm.cn.bing.net/th/id/OIP-C.lKHLs8pzS_AEF9oyQJlMWAHaKC?w=115&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bing.net/th/id/OIP-C.G5iGItupfQ6wO7jo4OvEDwHaLH?w=115&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse4-mm.cn.bing.net/th/id/OIP-C.CoEDaJYBoGXzB2KTrTUekQHaLH?w=115&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bing.net/th/id/OIP-C.Pp6SqAvYons5T1wQojCvEQHaEo?w=237&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bing.net/th/id/OIP-C.g3jWwd1OUQdchKuwpO8fjgHaEo?w=237&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse3-mm.cn.bing.net/th/id/OIP-C.2x6GH87OA7FG6nGtuAfdRwHaE8?w=274&amp;h=183&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse4-mm.cn.bing.net/th/id/OIP-C.OrSBZFBknVpJB9q1rZOj_QHaIv?w=155&amp;h=183&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse3-mm.cn.bing.net/th/id/OIP-C.GHeAMIHzvXy5I8GdNm-zpQHaHa?w=183&amp;h=183&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse3-mm.cn.bing.net/th/id/OIP-C.Tg2myQOlxuCeLL8MguRBxAHaKD?w=135&amp;h=183&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse1-mm.cn.bing.net/th/id/OIP-C._rF-GVQh-1trTW5H6T_kaAHaGB?w=225&amp;h=183&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bings://tse4-mm.cn.bing.net/th/id/OIP-C.BQVdT5WyY8to4Fme0Kz6ugHaHa?w=163&amp;h=180&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bing.net/th/id/OIP-C.LqMch238IGdu6GxQpJikkAHaLm?w=137&amp;h=204&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse4-mm.cn.bing.net/th/id/OIP-C.mg6oqj6G6gy6jEfkRMuD1wHaE8?w=306&amp;h=204&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse2-mm.cn.bing.net/th/id/OIP-C.JwfQwfrSQAsgfGhsq1DNsgHaLH?w=146&amp;h=204&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse4-mm.cn.bing.net/th/id/OIP-C.-761PIGz7FrKdNsrCLxlagDVEk?w=159&amp;h=204&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"', '"https://tse1-mm.cn.bing.net/th/id/OIP-C.dF-qEAjuaHsdvCyVgdKx0gHaHa?w=214&amp;h=204&amp;c=7&amp;r=0&amp;o=5&amp;pid=1.7"'] 25
But the results still feel incomplete; there should be another format that isn't being matched. You can look for it yourself.
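
One way to catch both formats is to let BeautifulSoup (already imported but unused in the code above) read the attributes instead of matching with a regex. This is only a sketch, assuming the lazy-loaded tags keep the mimg class as in the two sample lines in the comments; BeautifulSoup also decodes the &amp; entities, so the links come out ready to use:

def getData(url):
    html = askurl(url)
    soup = BeautifulSoup(html, "html.parser")
    picture = []
    # eagerly loaded thumbnails put the link in src, lazy-loaded ones
    # ("mimg vimgld") put it in data-src; both carry the "mimg" class
    for img in soup.find_all("img", class_="mimg"):
        link = img.get("src") or img.get("data-src")
        if link:
            picture.append(link)
    print(picture, len(picture))
    return picture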