马上注册,结交更多好友,享用更多功能^_^
您需要 登录 才可以下载或查看,没有账号?立即注册
x
爬完知乎才知道我真是太年轻了,10条消息的ajax 100多k,提取数据用了我几个小时
在提取数据的时候,开始用的json.loads变为字典,找相应的键真的是累死我了,找完之后发现报错,有的没有这个键。
然后我改成正则,提取发现也少了好多数据,请问这是什么原因?当得到庞大的json数据应该怎么提取有用的信息,
求大佬们解答。
还有返回的内容写入文件中文全变成Unicode编码,中文都是\uxxxx这种格式,这怎么处理,
开始 https://www.zhihu.com/search?type=content&q=%E5%A6%B9%E5%AD%90
后面的内容是ajax加载,
获取ajax代码:
import requests
import json

import requests

# Request headers copied from the browser's devtools for a logged-in
# session.  NOTE(review): the cookie, x-ab-param and search_hash_id are
# session-specific and will expire — refresh them from the browser when
# the request stops returning data.
headers = {
    'accept-language': 'zh-CN,zh;q=0.8',
    'cache-control': 'no-cache',
    'cookie': '_zap=48d31444-3981-480e-8aab-485adebe718f; _xsrf=RTTautSHxIPCh2iSXTI4KrPGKnGCoAdT; d_c0="ANBohrrhdA6PTm8vXMF6K91H1Onr26TDCHk=|1541141970"; capsion_ticket="2|1:0|10:1541141989|14:capsion_ticket|44:ZDc5ZWE2MzFmMGEzNDUzMmJkOGRiMjI3NmNjMzkwZTg=|680c418d58e5fda2affa84ca509050390205bd537a7bdab551777d74977bfe2e"; z_c0="2|1:0|10:1541142041|4:z_c0|92:Mi4xbzBZREJ3QUFBQUFBMEdpR3V1RjBEaVlBQUFCZ0FsVk5HVVRKWEFEbmIwbmNkVDRzYWRLWm9fLVpiMVVOLVotRUZR|79bbfd843f03b2960de60c4114f53ae7e9393823f2f29a39156a95fac40b4797"; q_c1=e0001bf105d144dfb71560f4f94870c5|1541142043000|1541142043000; tst=r; __utma=155987696.468016062.1541144914.1541144914.1541144914.1; __utmc=155987696; __utmz=155987696.1541144914.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); tgw_l7_route=ec452307db92a7f0fdb158e41da8e5d8',
    'pragma': 'no-cache',
    'referer': 'https://www.zhihu.com/search?type=content&q=%E5%A6%B9%E5%AD%90',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.61 Safari/537.36',
    # A/B-test flags; this value must stay on ONE line — the original
    # paste had it broken across two lines, which is a syntax error.
    'x-ab-param': 'top_sjre=0;top_vd_gender=0;tp_discussion_feed_card_type=0;pin_ef=a;top_quality=0;top_sj=2;se_gemini_service=content;top_vds_alb_pos=0;top_fqa=0;top_tagore=1;top_tr=0;se_consulting_price=n;top_nid=0;top_fqai=0;top_nmt=0;top_video_score=1;tp_write_pin_guide=3;top_pfq=2;top_tuner_refactor=-1;top_user_gift=0;se_gi=0;top_videos_priority=-1;se_daxuechuisou=new;top_hweb=0;top_nszt=0;top_video_rew=0;top_mlt_model=0;top_billpic=0;top_f_r_nb=1;top_local=1;top_video_fix_position=2;top_no_weighing=1;top_recall=1;top_new_user_gift=0;top_root_web=0;se_minor_onebox=d;tp_discussion_feed_type_android=0;top_is_gr=0;top_recall_tb_short=61;top_feedre_itemcf=32;top_keyword=0;se_new_market_search=off;top_billab=1;top_billupdate1=3;top_login_card=1;se_auto_syn=0;top_alt=0;top_nucc=3;se_entity=on;top_30=0;top_uit=0;top_feedre_rtt=41;tp_ios_topic_write_pin_guide=1;top_free_content=-1;top_recall_tb_long=51;top_root=1;pin_efs=orig;top_v_album=1;top_vdio_rew=0;se_merger=1;se_rescore=1;top_lowup=1;top_recall_tb=5;top_billboard_count=1;top_yc=0;se_tf=1;top_feedre_cpt=101;se_ingress=on;top_gr_model=0;top_card=-1;top_topic_feedre=21;top_gif=0;top_retag=0;top_vd_op=0;ls_new_video=0;top_test_4_liguangyi=1;top_feedre=1;top_follow_reason=0;top_promo=1;top_dtmt=2;top_raf=n;se_dt=1;se_cm=0;top_root_few_topic=0;top_spec_promo=1;se_product_rank_list=0;top_ab_validate=2;top_billread=1;top_newfollow=0;top_nuc=0;zr_ans_rec=gbrank;se_relevant_query=old;top_slot_ad_pos=1;top_gr_auto_model=0;top_ntr=1;top_recall_deep_user=1;top_multi_model=0;top_rank=3;se_correct_ab=0;se_major_onebox=major;top_ebook=0;top_recall_core_interest=81;top_ad_slot=1;top_memberfree=1;top_billvideo=0;top_recommend_topic_card=0;top_an=0;top_manual_tag=1;se_consulting_switch=off;top_ac_merge=0;top_tffrt=0;top_tagore_topic=0;top_tmt=0;se_wiki_box=1;top_hca=0;top_hqt=9;top_keywordab=0;top_feedtopiccard=0;top_newfollowans=0;top_adpar=0;top_cc_at=1;top_recall_follow_user=91;top_recall_tb_follow=71;top_retagg=0;top_followtop=1;top_tag_isolation=0;top_universalebook=1;top_yhgc=1;se_refactored_search_index=0;top_gr_topic_reweight=0;top_bill=0;se_dl=0;tp_sft=a;ls_play_continuous_order=2;top_roundtable=1;top_wonderful=1;top_root_ac=1;top_root_mg=1;top_distinction=0;top_follow_question_hybrid=0;top_nad=1',
    'x-api-version': '3.0.91',
    'x-app-za': 'OS=Web',
    'x-requested-with': 'fetch',
    'x-udid': 'ANBohrrhdA6PTm8vXMF6K91H1Onr26TDCHk=',
}

# The ajax endpoint behind the search page; `offset`/`limit` page
# through the results ten at a time.
url = 'https://www.zhihu.com/api/v4/search_v3'
params = {
    't': 'general',
    'q': '妹子',
    'correction': '1',
    'offset': '85',
    'limit': '10',
    'show_all_topics': '0',
    'search_hash_id': '8cc6b90599e097ccf95500482c8a27cf',
}

resp = requests.get(url, params=params, headers=headers)
# resp.text is ASCII-safe JSON, so every Chinese character arrives as a
# \uXXXX escape — that is why the saved file looked like \uXXXX mojibake.
# Round-tripping through the json module with ensure_ascii=False writes
# the file with readable Chinese instead.
with open('ajax.txt', 'w', encoding='utf-8') as fw:
    json.dump(resp.json(), fw, ensure_ascii=False, indent=2)
正则代码:
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 2 16:39:21 2018
@author: python
"""
import json
import re
with open('ajax.txt', encoding='utf-8') as fr:
ajax = fr.read()
discuss = re.findall(r'comment_count[\s\S]*?(\d+)',ajax)
print(discuss)
voteup_count = re.findall(r'voteup_count[\s\S]*?(\d+)', ajax)
print(voteup_count)
question = re.findall(r'question\s*"\s*:\s*(\{[\s\S]*?\}),',ajax)
#print(question)
print('====')
for i in question:
print(json.loads(i)['id'])
# "question": {"url": "https://api.zhihu.com/questions/62804725", "type": "question", "id": "62804725", "name": "\u600e\u4e48\u52fe\u642d\u6f2b\u5c55\u4e0a\u7684<em>\u59b9\u5b50<\/em>\uff1f
id_ = re.findall(r'"https://api.zhihu.com/questions/.+?"',ajax)
print(id_)
title = re.findall(r'title":\s*"([\s\S]*?)"',ajax)
for i in title:
print(i.encode('utf-8').decode('unicode_escape'))
本帖最后由 水瓶座 于 2018-11-2 21:15 编辑
找键的话,可以当还是json的时候去→https://tool.lu/json/←解析成树状图,之后就so easy了
至于\uxxxx的编码,老实说我也不知道是什么编码,不过,用易语言转换后是这样的↓
|