 import signal
 
 from twisted.internet import reactor, threads
+from twisted.python import threadable
 from w3lib.url import any_to_uri
 
 from scrapy.item import BaseItem
@@ -25,24 +26,18 @@ class Shell(object):
     relevant_classes = (BaseSpider, Request, Response, BaseItem, \
         XPathSelector, Settings)
 
-    def __init__(self, crawler, update_vars=None, inthread=False, code=None):
+    def __init__(self, crawler, update_vars=None, code=None):
         self.crawler = crawler
         self.update_vars = update_vars or (lambda x: None)
         self.item_class = load_object(crawler.settings['DEFAULT_ITEM_CLASS'])
         self.spider = None
-        self.inthread = inthread
+        self.inthread = not threadable.isInIOThread()
         self.code = code
         self.vars = {}
 
-    def start(self, *a, **kw):
+    def start(self, url=None, request=None, response=None, spider=None):
         # disable accidental Ctrl-C key press from shutting down the engine
         signal.signal(signal.SIGINT, signal.SIG_IGN)
-        if self.inthread:
-            return threads.deferToThread(self._start, *a, **kw)
-        else:
-            self._start(*a, **kw)
-
-    def _start(self, url=None, request=None, response=None, spider=None):
         if url:
             self.fetch(url, spider)
         elif request:
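For context on the change above: twisted.python.threadable.isInIOThread() returns True when the caller is running in the reactor's I/O thread, which is why the commit can compute inthread automatically instead of asking callers to pass a flag. Below is a minimal, hypothetical sketch (not part of this commit; run_blocking is an invented name) of how that check pairs with threads.deferToThread to keep blocking work off the reactor thread:

from twisted.internet import threads
from twisted.python import threadable

def run_blocking(func, *args, **kwargs):
    # threadable.isInIOThread() is True when we are on the reactor's I/O
    # thread, where a blocking call such as an interactive shell must not run.
    if threadable.isInIOThread():
        # Hand the call to Twisted's thread pool and return a Deferred.
        return threads.deferToThread(func, *args, **kwargs)
    # Already on an ordinary thread: safe to call synchronously.
    return func(*args, **kwargs)

Note that after this change Shell.start() no longer defers itself to a thread pool; if it is invoked from the reactor thread, the caller would presumably need to arrange that (for example via deferToThread as sketched above), while Shell merely records its situation in self.inthread.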