1 parent 16d9a33 commit 74413ff
scrapy/core/engine.py
@@ -130,10 +130,10 @@ def has_capacity(self):
         return len(self.downloader.sites) < self.downloader.concurrent_spiders
 
     def crawl(self, request, spider):
-        assert spider in self.open_spiders, \
-            "Spider %r not opened when crawling: %s" % (spider.name, request)
         if spider in self.closing: # ignore requests for spiders being closed
             return
+        assert spider in self.open_spiders, \
+            "Spider %r not opened when crawling: %s" % (spider.name, request)
         schd = mustbe_deferred(self.schedule, request, spider)
         # FIXME: we can't log errors because we would be preventing them from
         # propagating to the request errback. This should be fixed after the
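For context, the commit moves the assertion below the closing check, so a request for a spider that is being closed is now ignored (per the inline comment) instead of tripping the assert. Reassembled from the hunk above, the method reads roughly as follows after this change; the trailing FIXME comment is truncated in the diff and left as-is:

    def crawl(self, request, spider):
        # Requests for spiders being closed are silently dropped.
        if spider in self.closing:  # ignore requests for spiders being closed
            return
        # Only now enforce that the spider is actually open.
        assert spider in self.open_spiders, \
            "Spider %r not opened when crawling: %s" % (spider.name, request)
        schd = mustbe_deferred(self.schedule, request, spider)
        # FIXME: we can't log errors because we would be preventing them from
        # propagating to the request errback. This should be fixed after the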