I keep getting the following error when I try to run my Scrapy program: TypeError: this constructor takes no arguments. I have searched around but can't find anything that helps me solve the problem. I don't know whether I have a typo I keep missing or something like that, but any help would be greatly appreciated. The attached code is the part of the middlewares.py file that reports the error, not the whole file.
# middlewares.py (excerpt)
import random
from copy import copy

from scrapy import signals


class ProfileMiddleware:
    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        mw = cls(crawler, *args, **kwargs)
        crawler.signals.connect(mw.spider_opened, signal=signals.spider_opened)
        mw.settings = crawler.settings
        return mw

    def spider_opened(self, spider, **kwargs):
        # Build (proxy, user-agent) profiles from the settings and shuffle them.
        proxies = self.settings.getlist('PROXIES')
        user_agents = self.settings.getlist('USER_AGENTS')
        self.profiles = list(zip(proxies, user_agents))
        self.queue = copy(self.profiles)
        random.shuffle(self.queue)

    def process_request(self, request, spider):
        # Refill and reshuffle the queue once every profile has been used.
        if not self.queue:
            self.queue = copy(self.profiles)
            random.shuffle(self.queue)
        profile = self.queue.pop()
        request.headers['User-Agent'] = profile[1]
        request.meta['proxy'] = profile[0]
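For context, the middleware pulls PROXIES and USER_AGENTS out of the project settings. My settings.py has entries roughly along these lines (the values below are placeholders rather than my real proxies and user agents, and the middleware path is inferred from the traceback):

# settings.py (sketch with placeholder values)
PROXIES = [
    'http://203.0.113.10:8080',
    'http://203.0.113.11:8080',
]
USER_AGENTS = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ...',
    'Mozilla/5.0 (X11; Linux x86_64) ...',
]
DOWNLOADER_MIDDLEWARES = {
    # path inferred from the traceback; priority 543 is an arbitrary example
    'scrapy_javascript.middlewares.ProfileMiddleware': 543,
}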
Error output:
2019-05-23 10:10:51 [twisted] CRITICAL: Unhandled error in Deferred:
Traceback (most recent call last):
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 172, in crawl
    return self._crawl(crawler, *args, **kwargs)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 176, in _crawl
    d = crawler.crawl(*args, **kwargs)
  File "/home/trno224/.local/lib/python2.7/site-packages/twisted/internet/defer.py", line 1613, in unwindGenerator
    return _cancellableInlineCallbacks(gen)
  File "/home/trno224/.local/lib/python2.7/site-packages/twisted/internet/defer.py", line 1529, in _cancellableInlineCallbacks
    _inlineCallbacks(None, g, status)
--- <exception caught here> ---
  File "/home/trno224/.local/lib/python2.7/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
    result = g.send(result)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 98, in crawl
    six.reraise(*exc_info)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 80, in crawl
    self.engine = self._create_engine()
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 105, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.downloader = downloader_cls(crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/core/downloader/__init__.py", line 88, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/middleware.py", line 53, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/middleware.py", line 35, in from_settings
    mw = create_instance(mwcls, settings, crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/utils/misc.py", line 140, in create_instance
    return objcls.from_crawler(crawler, *args, **kwargs)
  File "/home/trno224/scrapy_splash/scrapy_javascript/scrapy_javascript/middlewares.py", line 109, in from_crawler
    mw = cls(crawler, *args, **kwargs)
exceptions.TypeError: this constructor takes no arguments
2019-05-23 10:10:51 [twisted] CRITICAL:
Traceback (most recent call last):
  File "/home/trno224/.local/lib/python2.7/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
    result = g.send(result)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 98, in crawl
    six.reraise(*exc_info)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 80, in crawl
    self.engine = self._create_engine()
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/crawler.py", line 105, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/core/engine.py", line 69, in __init__
    self.downloader = downloader_cls(crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/core/downloader/__init__.py", line 88, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/middleware.py", line 53, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/middleware.py", line 35, in from_settings
    mw = create_instance(mwcls, settings, crawler)
  File "/home/trno224/.local/lib/python2.7/site-packages/scrapy/utils/misc.py", line 140, in create_instance
    return objcls.from_crawler(crawler, *args, **kwargs)
  File "/home/trno224/scrapy_splash/scrapy_javascript/scrapy_javascript/middlewares.py", line 109, in from_crawler
    mw = cls(crawler, *args, **kwargs)
TypeError: this constructor takes no arguments
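For what it's worth, I noticed the exact same error message can be reproduced in plain Python 2.7 by calling an old-style class (one that defines no __init__ and does not inherit from object) with arguments. I'm not sure whether that is what is going on in my middleware, but here is a minimal standalone sketch, not code from my project:

# repro.py -- minimal sketch, Python 2.7 only; not part of my project
class Demo:          # old-style class with no __init__
    pass

Demo('anything')     # raises: TypeError: this constructor takes no arguments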