楼主 发表于 2023-2-5 23:36:25（显示全部楼层）
Traceback (most recent call last):
File "E:\python\Lib\site-packages\scrapy\utils\defer.py", line 240, in iter_errback
yield next(it)
^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\utils\python.py", line 338, in __next__
return next(self.data)
^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\utils\python.py", line 338, in __next__
return next(self.data)
^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\python\Lib\site-packages\scrapy\spidermiddlewares\offsite.py", line 29, in <genexpr>
return (r for r in result or () if self._filter(r, spider))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\pycharmprojects\pythonproject\爬取纵横\zh\zh\middlewares.py", line 36, in process_spider_output
for i in result:
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\python\Lib\site-packages\scrapy\spidermiddlewares\referer.py", line 336, in <genexpr>
return (self._set_referer(r, response) for r in result or ())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\python\Lib\site-packages\scrapy\spidermiddlewares\urllength.py", line 28, in <genexpr>
return (r for r in result or () if self._filter(r, spider))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\python\Lib\site-packages\scrapy\spidermiddlewares\depth.py", line 32, in <genexpr>
return (r for r in result or () if self._filter(r, response, spider))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\core\spidermw.py", line 79, in process_sync
for r in iterable:
File "E:\pycharmprojects\pythonproject\爬取纵横\zh\zh\spiders\zhpq.py", line 31, in parse_two
yield scrapy.Request(two_url,callback = self.parse_three,headers = headers )
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\python\Lib\site-packages\scrapy\http\request\__init__.py", line 60, in __init__
self._set_url(url)
File "E:\python\Lib\site-packages\scrapy\http\request\__init__.py", line 98, in _set_url
raise TypeError(f"Request url must be str, got {type(url).__name__}")
TypeError: Request url must be str, got list
2023-02-05 23:31:50 [scrapy.core.engine] INFO: Closing spider (finished)
2023-02-05 23:31:50 [scrapy.statscollectors] INFO: Dumping Scrapy stats:
{'downloader/request_bytes': 693,
'downloader/request_count': 2,
'downloader/request_method_count/GET': 2,
'downloader/response_bytes': 28209,
'downloader/response_count': 2,
'downloader/response_status_count/200': 2,
'elapsed_time_seconds': 0.758224,
'finish_reason': 'finished',
'finish_time': datetime.datetime(2023, 2, 5, 15, 31, 50, 689839),
'httpcompression/response_bytes': 192444,
'httpcompression/response_count': 2,
'log_count/DEBUG': 9,
'log_count/ERROR': 1,
'log_count/INFO': 12,
'request_depth_max': 1,
'response_received_count': 2,
'scheduler/dequeued': 2,
'scheduler/dequeued/memory': 2,
'scheduler/enqueued': 2,
'scheduler/enqueued/memory': 2,
'spider_exceptions/TypeError': 1,
'start_time': datetime.datetime(2023, 2, 5, 15, 31, 49, 931615)}
2023-02-05 23:31:50 [scrapy.core.engine] INFO: Spider closed (finished)
以上是 Traceback 部分的完整日志。报错根源在最后一行：`TypeError: Request url must be str, got list` —— 在 `zhpq.py` 第 31 行 `yield scrapy.Request(two_url, ...)` 中，`two_url` 是一个列表（很可能来自 xpath/css 的 `extract()`/`getall()`），而 `scrapy.Request` 的 url 参数必须是字符串；应改用 `extract_first()` / `get()`，或取列表中的某个元素（如 `two_url[0]`），或对列表逐项循环分别 yield Request。