@@ -125,12 +125,12 @@ def get_callback_from_rules(self, response):
     def set_spider(self, url, opts):
         if opts.spider:
             try:
-                self.spider = self.crawler.spiders.create(opts.spider, **opts.spargs)
+                self.spider = self.pcrawler.spiders.create(opts.spider, **opts.spargs)
             except KeyError:
                 log.msg(format='Unable to find spider: %(spider)s',
                         level=log.ERROR, spider=opts.spider)
         else:
-            self.spider = create_spider_for_request(self.crawler.spiders, Request(url), **opts.spargs)
+            self.spider = create_spider_for_request(self.pcrawler.spiders, Request(url), **opts.spargs)
         if not self.spider:
             log.msg(format='Unable to find spider for: %(url)s',
                     level=log.ERROR, url=url)
@@ -139,8 +139,8 @@ def start_parsing(self, url, opts):
         request = Request(url, opts.callback)
         request = self.prepare_request(request, opts)
 
-        self.crawler.crawl(self.spider, [request])
-        self.crawler.start()
+        self.pcrawler.crawl(self.spider, [request])
+        self.crawler_process.start()
 
         if not self.first_response:
             log.msg(format='No response downloaded for: %(request)s',
@@ -174,7 +174,7 @@ def callback(response):
 
             items, requests = self.run_callback(response, cb)
             if opts.pipelines:
-                itemproc = self.crawler.engine.scraper.itemproc
+                itemproc = self.pcrawler.engine.scraper.itemproc
                 for item in items:
                     itemproc.process_item(item, self.spider)
             self.add_items(depth, items)
@@ -207,6 +207,7 @@ def run(self, args, opts):
         url = args[0]
 
         # prepare spider
+        self.pcrawler = self.crawler_process.create_crawler()
         self.set_spider(url, opts)
 
         if self.spider and opts.depth > 0: