鱼C论坛

 找回密码
 立即注册
查看: 967|回复: 5

[已解决]怎样才能让我爬取下来的数据保存到EXCEL表或者CSV文件里呢

[复制链接]
发表于 2020-8-3 15:53:26 | 显示全部楼层 |阅读模式

马上注册,结交更多好友,享用更多功能^_^

您需要 登录 才可以下载或查看,没有账号?立即注册

x
# -*- coding: utf-8 -*-
"""
Created on Mon Aug  3 13:43:56 2020

@author: Administrator
"""

import os, re
import requests
import random
import time
from bs4 import BeautifulSoup
import xlrd





# Pool of desktop-browser User-Agent strings; one is picked at random per run
# so repeated requests do not all carry an identical UA header.
user_agent_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
]

# Randomly chosen UA string for this run.
UA = random.choice(user_agent_list)
# Request headers carrying the chosen User-Agent.
headers = {'User-Agent': UA}


# Base URL of the community-listing pages; the page number is appended below.
url = 'https://cd.ke.com/xiaoqu/damian/pg'

# Scrape listing pages 1 and 2 and print name + location for each entry.
for x in range(1, 3):

    # Polite random delay between page requests to avoid hammering the server.
    time.sleep(random.randint(2, 5))

    with requests.get(url + str(x) + 'ddo22p7', headers=headers, timeout=5) as response:

        soup = BeautifulSoup(response.text, 'lxml')

        # print(soup.title.text)  # page title, for debugging

        # Guard against a missing listing container (layout change or blocked
        # request) instead of crashing with AttributeError on None.
        list_container = soup.find('ul', class_='listContent')
        if list_container is None:
            continue

        for li_quick in list_container.find_all('li'):
            try:
                # Community name.
                title = li_quick.find('div', class_='title').a.get_text().strip()
                # Location information.
                positionInfo = li_quick.find('div', class_='positionInfo').get_text().strip()
            except AttributeError:
                # This <li> is not a listing entry (missing title/position
                # divs); skip it. The original used a bare `except` with a
                # `finally: print(...)`, which printed stale values from the
                # previous item (or raised NameError on the first failure).
                continue
            print(title, ',', positionInfo)
121212.png
最佳答案
2020-8-3 16:12:50


这样吧,直接写入 csv 了,代码中多余的空格也帮你替换掉了,试试看(注意:如果 Excel 打开出现乱码,那么通过笔记本打开即可):

  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on Mon Aug  3 13:43:56 2020

  4. @author: Administrator
  5. """

  6. import os, re
  7. import requests
  8. import random
  9. import time
  10. from bs4 import BeautifulSoup
  11. import xlrd

  12. user_agent_list = [
  13.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
  14.     "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
  15.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
  16.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
  17.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
  18.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
  19.     "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
  20.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  21.     "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  22.     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  23.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  24.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  25.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  26.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  27.     "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  28.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
  29.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
  30.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
  31. ]
  32. UA = random.choice(user_agent_list)  ##从self.user_agent_list中随机取出一个字符串
  33. headers = {'User-Agent': UA}  ##构造成一个完整的User-Agent (UA代表的是上面随机取出来的字符串哦)

  34. url = 'https://cd.ke.com/xiaoqu/damian/pg'

  35. for x in range(1, 3):

  36.     time.sleep(random.randint(2, 5))

  37.     with requests.get(url + str(x) + 'ddo22p7', headers=headers, timeout=5) as response:

  38.         soup = BeautifulSoup(response.text, 'lxml')

  39.         # print(soup.title.text)  # 打印title

  40.         li_list = soup.find('ul', class_='listContent').find_all('li')
  41.         # print(li_list)
  42.         file = open('data.csv','w',encoding='utf-8')  # 文件名自己改改哈~
  43.         for li_quick in li_list:

  44.             try:

  45.                 # 取名称
  46.                 title = li_quick.find('div', class_='title').a.get_text().strip()

  47.                 # 取位置信息
  48.                 positionInfo = ','.join(li_quick.find('div', class_='positionInfo').get_text().replace('\n','').replace(' ','').replace('/','').split())

  49.             except:
  50.                 continue
  51.             finally:
  52.                 file.write(title+','+positionInfo+'\n')
  53.                 print(title, ',', positionInfo)
  54.         file.close()
复制代码
想知道小甲鱼最近在做啥?请访问 -> ilovefishc.com
回复

使用道具 举报

发表于 2020-8-3 16:12:50 | 显示全部楼层    本楼为最佳答案   


这样吧,直接写入 csv 了,代码中多余的空格也帮你替换掉了,试试看(注意:如果 Excel 打开出现乱码,那么通过笔记本打开即可):

  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on Mon Aug  3 13:43:56 2020

  4. @author: Administrator
  5. """

  6. import os, re
  7. import requests
  8. import random
  9. import time
  10. from bs4 import BeautifulSoup
  11. import xlrd

  12. user_agent_list = [
  13.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
  14.     "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
  15.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
  16.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
  17.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
  18.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
  19.     "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
  20.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  21.     "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  22.     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  23.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  24.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  25.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  26.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  27.     "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  28.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
  29.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
  30.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
  31. ]
  32. UA = random.choice(user_agent_list)  ##从self.user_agent_list中随机取出一个字符串
  33. headers = {'User-Agent': UA}  ##构造成一个完整的User-Agent (UA代表的是上面随机取出来的字符串哦)

  34. url = 'https://cd.ke.com/xiaoqu/damian/pg'

  35. for x in range(1, 3):

  36.     time.sleep(random.randint(2, 5))

  37.     with requests.get(url + str(x) + 'ddo22p7', headers=headers, timeout=5) as response:

  38.         soup = BeautifulSoup(response.text, 'lxml')

  39.         # print(soup.title.text)  # 打印title

  40.         li_list = soup.find('ul', class_='listContent').find_all('li')
  41.         # print(li_list)
  42.         file = open('data.csv','w',encoding='utf-8')  # 文件名自己改改哈~
  43.         for li_quick in li_list:

  44.             try:

  45.                 # 取名称
  46.                 title = li_quick.find('div', class_='title').a.get_text().strip()

  47.                 # 取位置信息
  48.                 positionInfo = ','.join(li_quick.find('div', class_='positionInfo').get_text().replace('\n','').replace(' ','').replace('/','').split())

  49.             except:
  50.                 continue
  51.             finally:
  52.                 file.write(title+','+positionInfo+'\n')
  53.                 print(title, ',', positionInfo)
  54.         file.close()
复制代码
想知道小甲鱼最近在做啥?请访问 -> ilovefishc.com
回复 支持 1 反对 0

使用道具 举报

 楼主| 发表于 2020-8-3 17:22:06 | 显示全部楼层
Twilight6 发表于 2020-8-3 16:12
这样吧,直接写入 csv 了,代码中多余的空格也帮你替换掉了,试试看(注意:如果 Excel 打开出现乱码, ...

太感谢你了,5星好评,但是还有最后一个问题请教你一下,我爬取的目标有62个,但是获取下来的资料怎么少了很多呢
想知道小甲鱼最近在做啥?请访问 -> ilovefishc.com
回复 支持 反对

使用道具 举报

发表于 2020-8-3 17:27:56 | 显示全部楼层
L嘉 发表于 2020-8-3 17:22
太感谢你了,5星好评,但是还有最后一个问题请教你一下,我爬取的目标有62个,但是获取下来的资料怎么少 ...

哈哈,我的错,我不小心把打开文件的代码放在了 for 循环内,导致每页都重新覆盖写入文件,但是只爬到 49 个欸:

  1. # -*- coding: utf-8 -*-
  2. """
  3. Created on Mon Aug  3 13:43:56 2020

  4. @author: Administrator
  5. """

  6. import os, re
  7. import requests
  8. import random
  9. import time
  10. from bs4 import BeautifulSoup
  11. import xlrd

  12. user_agent_list = [
  13.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
  14.     "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
  15.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
  16.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
  17.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
  18.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
  19.     "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
  20.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  21.     "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  22.     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
  23.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  24.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
  25.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  26.     "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  27.     "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
  28.     "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
  29.     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
  30.     "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
  31. ]
  32. UA = random.choice(user_agent_list)  ##从self.user_agent_list中随机取出一个字符串
  33. headers = {'User-Agent': UA}  ##构造成一个完整的User-Agent (UA代表的是上面随机取出来的字符串哦)

  34. url = 'https://cd.ke.com/xiaoqu/damian/pg'
  35. file = open('data.csv','w',encoding='utf-8')  # 文件名自己改改哈~
  36. for x in range(1, 3):

  37.     time.sleep(random.randint(2, 5))

  38.     with requests.get(url + str(x) + 'ddo22p7', headers=headers, timeout=5) as response:

  39.         soup = BeautifulSoup(response.text, 'lxml')

  40.         # print(soup.title.text)  # 打印title

  41.         li_list = soup.find('ul', class_='listContent').find_all('li')
  42.         # print(li_list)
  43.         for li_quick in li_list:
  44.             try:
  45.                 # 取名称
  46.                 title = li_quick.find('div', class_='title').a.get_text().strip()

  47.                 # 取位置信息
  48.                 positionInfo = ','.join(li_quick.find('div', class_='positionInfo').get_text().replace('\n','').replace(' ','').replace('/','').split())

  49.             except:
  50.                 continue
  51.             finally:
  52.                 file.write(title+','+positionInfo+'\n')
  53.                 print(title, ',', positionInfo)
  54. file.close()
复制代码
想知道小甲鱼最近在做啥?请访问 -> ilovefishc.com
回复 支持 1 反对 0

使用道具 举报

 楼主| 发表于 2020-8-3 19:55:44 | 显示全部楼层
Twilight6 发表于 2020-8-3 17:27
哈哈,我的错,我不小心把 打开文件放 for 循环内了,但是只爬到 49 个欸:

好的 感谢大神给我这个菜鸟教学

点评

客气了~  发表于 2020-8-3 19:56
想知道小甲鱼最近在做啥?请访问 -> ilovefishc.com
回复 支持 反对

使用道具 举报

您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

小黑屋|手机版|Archiver|鱼C工作室 ( 粤ICP备18085999号-1 | 粤公网安备 44051102000585号)

GMT+8, 2024-4-25 12:11

Powered by Discuz! X3.4

© 2001-2023 Discuz! Team.

快速回复 返回顶部 返回列表