
Commit 2318c56

shutdown the active crawler on SIGINT. fixes scrapy#450
1 parent f80f10a commit 2318c56

File tree: 1 file changed (+16 −15 lines)

scrapy/crawler.py

Lines changed: 16 additions & 15 deletions
@@ -80,6 +80,7 @@ def __init__(self, settings):
         self.settings = settings
         self.crawlers = {}
         self.stopping = False
+        self._started = None

     def create_crawler(self, name=None):
         if name not in self.crawlers:
@@ -94,8 +95,8 @@ def start(self):
     @defer.inlineCallbacks
     def stop(self):
         self.stopping = True
-        for crawler in self.crawlers.itervalues():
-            yield crawler.stop()
+        if self._active_crawler:
+            yield self._active_crawler.stop()

     def _signal_shutdown(self, signum, _):
         install_shutdown_handlers(self._signal_kill)
@@ -129,20 +130,20 @@ def start_reactor(self):
         reactor.run(installSignalHandlers=False) # blocking call

     def _start_crawler(self):
-        if self.crawlers and not self.stopping:
-            name, crawler = self.crawlers.popitem()
-
-            sflo = log.start_from_crawler(crawler)
-            crawler.configure()
-            crawler.install()
-            crawler.signals.connect(crawler.uninstall, signals.engine_stopped)
-            if sflo:
-                crawler.signals.connect(sflo.stop, signals.engine_stopped)
-
-            crawler.signals.connect(self._check_done, signals.engine_stopped)
-            crawler.start()
+        if not self.crawlers or self.stopping:
+            return

-            return name, crawler
+        name, crawler = self.crawlers.popitem()
+        self._active_crawler = crawler
+        sflo = log.start_from_crawler(crawler)
+        crawler.configure()
+        crawler.install()
+        crawler.signals.connect(crawler.uninstall, signals.engine_stopped)
+        if sflo:
+            crawler.signals.connect(sflo.stop, signals.engine_stopped)
+        crawler.signals.connect(self._check_done, signals.engine_stopped)
+        crawler.start()
+        return name, crawler

     def _check_done(self, **kwargs):
         if not self._start_crawler():
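
What the change amounts to: stop() used to loop over every crawler registered in self.crawlers and yield crawler.stop() on each of them, including queued crawlers that had never been configured or started. With this patch _start_crawler() records the crawler it launches in self._active_crawler, and stop() shuts down only that one, so a SIGINT stops the crawl that is actually running. The sketch below illustrates the same pattern outside Scrapy and Twisted, using only the standard library; Job, QueueRunner and their methods are invented for illustration and are not part of Scrapy's API.

    import signal
    import time


    class Job:
        """Toy stand-in for a crawler: it can be started and stopped."""

        def __init__(self, name):
            self.name = name
            self.running = False

        def start(self):
            self.running = True
            print("started", self.name)

        def stop(self):
            if self.running:
                self.running = False
                print("stopped", self.name)


    class QueueRunner:
        """Runs queued jobs one at a time; on SIGINT it stops only the job
        that is actually running, mirroring the _active_crawler bookkeeping
        introduced by this commit."""

        def __init__(self, jobs):
            self.jobs = list(jobs)
            self.stopping = False
            self._active_job = None  # analogous to self._active_crawler
            signal.signal(signal.SIGINT, self._signal_shutdown)

        def _signal_shutdown(self, signum, frame):
            self.stopping = True
            if self._active_job:          # queued jobs never started,
                self._active_job.stop()   # so only the active one is stopped

        def run(self):
            while self.jobs and not self.stopping:
                self._active_job = self.jobs.pop()  # take the next queued job
                self._active_job.start()
                time.sleep(1)                       # pretend it does some work
                self._active_job.stop()


    if __name__ == "__main__":
        QueueRunner([Job("a"), Job("b"), Job("c")]).run()

Keeping a single _active_crawler reference is enough here because CrawlerProcess runs its crawlers sequentially (_check_done starts the next one only after the previous engine has stopped), so at most one crawler has a running engine at any time and the rest have nothing to stop yet.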

0 commit comments
