Skip to content

Commit 65fc2fb

Browse files
committed
Set CONCURRENT_SPIDERS=1 in Scrapyd to force one spider per process
1 parent b1c8950 commit 65fc2fb

File tree

2 files changed

+2
-0
lines changed

2 files changed

+2
-0
lines changed

scrapyd/environ.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ def get_environment(self, message, slot):
         dbpath = os.path.join(self.dbs_dir, '%s.db' % project)
         env['SCRAPY_SQLITE_DB'] = dbpath
         env['SCRAPY_LOG_FILE'] = self._get_log_file(message)
+        env['SCRAPY_CONCURRENT_SPIDERS'] = '1' # scrapyd runs one spider per process
         return env

     def _get_log_file(self, message):

scrapyd/tests/test_environ.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ def test_get_environment_with_eggfile(self):
         self.assertEqual(env['SCRAPY_SLOT'], '3')
         self.assertEqual(env['SCRAPY_SPIDER'], 'myspider')
         self.assertEqual(env['SCRAPY_JOB'], 'ID')
+        self.assertEqual(env['SCRAPY_CONCURRENT_SPIDERS'], '1')
         self.assert_(env['SCRAPY_SQLITE_DB'].endswith('mybot.db'))
         self.assert_(env['SCRAPY_LOG_FILE'].endswith('/mybot/myspider/ID.log'))
         self.failIf('SCRAPY_SETTINGS_MODULE' in env)

0 commit comments

Comments (0)