3 files changed: 7 additions, 12 deletions.
@@ -167,15 +167,13 @@ def do_process(url):
 The main crawler.
 """
 def crawler(url):
-  if menu.options.crawldepth > 0:
-    menu.options.DEFAULT_CRAWLDEPTH_LEVEL = menu.options.crawldepth
   if not menu.options.sitemap_url:
-    if menu.options.DEFAULT_CRAWLDEPTH_LEVEL > 2:
-      err_msg = "Depth level '" + str(menu.options.DEFAULT_CRAWLDEPTH_LEVEL) + "' is not a valid."
+    if menu.options.crawldepth > 2:
+      err_msg = "Depth level '" + str(menu.options.crawldepth) + "' is not a valid."
       print(settings.print_error_msg(err_msg))
       raise SystemExit()
     info_msg = "Starting crawler and searching for "
-    info_msg += "links with depth " + str(menu.options.DEFAULT_CRAWLDEPTH_LEVEL) + "."
+    info_msg += "links with depth " + str(menu.options.crawldepth) + "."
     print(settings.print_info_msg(info_msg))
   else:
     while True:
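Note: the depth check above now reads menu.options.crawldepth directly instead of mirroring it into the removed settings.DEFAULT_CRAWLDEPTH_LEVEL. A minimal, self-contained sketch of the resulting validation; the Options class and the simplified messages are stand-ins, not commix code:

# Minimal sketch of the post-change depth check; Options is a stand-in
# for menu.options and the messages are simplified.
class Options:
    crawldepth = 3      # deliberately out of range to show the error path
    sitemap_url = None

def check_crawl_depth(options):
    if not options.sitemap_url:
        if options.crawldepth > 2:
            raise SystemExit("Depth level '" + str(options.crawldepth) + "' is not valid.")
        print("Starting crawler and searching for links with depth " + str(options.crawldepth) + ".")

try:
    check_crawl_depth(Options())
except SystemExit as err:
    print(err)  # -> Depth level '3' is not valid.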
@@ -207,7 +205,7 @@ def crawler(url):
         print(settings.print_error_msg(err_msg))
         pass
       else:
-        menu.options.DEFAULT_CRAWLDEPTH_LEVEL = message
+        menu.options.crawldepth = message
         break

   while True:
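The hunk above sits inside an interactive prompt loop that keeps asking until a usable depth is given and then stores the reply on menu.options.crawldepth. An illustrative, self-contained version (the range check and the simulated answers are assumptions, since the surrounding loop is not shown in full):

from types import SimpleNamespace

# Illustrative prompt loop: keep asking until a usable depth is given,
# then store it on the options object. menu.options is replaced by a
# SimpleNamespace and user input is simulated so the sketch runs
# unattended; the real code reads the reply interactively.
options = SimpleNamespace(crawldepth = 0)
answers = iter(["abc", "2"])

while True:
    message = next(answers)
    if not message.isdigit() or not 1 <= int(message) <= 2:
        print("Error: '" + message + "' is not a valid answer.")
        continue
    options.crawldepth = int(message)
    break

print(options.crawldepth)  # -> 2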
@@ -250,7 +248,7 @@ def crawler(url):

   if not sitemap_check:
     output_href = do_process(url)
-    if menu.options.DEFAULT_CRAWLDEPTH_LEVEL > 1:
+    if menu.options.crawldepth > 1:
       for url in output_href:
         output_href = do_process(url)
   if SKIPPED_URLS == 0:
@@ -162,7 +162,7 @@ def banner():
                         default = 0,
                         dest = "crawldepth",
                         type = "int",
-                        help = "Crawl the website starting from the target URL (1-2, Default: " + str(settings.DEFAULT_CRAWLDEPTH_LEVEL) + ").")
+                        help = "Crawl the website starting from the target URL (1-2, Default: 0).")

   target.add_option("-x",
                         dest = "sitemap_url",
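The help string now hard-codes the default of 0 rather than interpolating the removed settings constant. A sketch of how such an optparse option can be declared; only default, dest, type and help come from the diff, while the flag name "--crawl", the group title and the parser setup are assumptions:

from optparse import OptionParser, OptionGroup

parser = OptionParser()
target = OptionGroup(parser, "Target")
target.add_option("--crawl",            # assumed flag name, not shown in this diff
                  default = 0,
                  dest = "crawldepth",
                  type = "int",
                  help = "Crawl the website starting from the target URL (1-2, Default: 0).")
parser.add_option_group(target)

options, _ = parser.parse_args([])      # no flag given -> the hard-coded default of 0
print(options.crawldepth)               # -> 0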
@@ -216,7 +216,7 @@ def sys_argv_errors():
 DESCRIPTION = "The command injection exploiter"
 AUTHOR = "Anastasios Stasinopoulos"
 VERSION_NUM = "3.3"
-REVISION = "35"
+REVISION = "36"
 STABLE_RELEASE = False
 if STABLE_RELEASE:
   VERSION = "v" + VERSION_NUM + "-stable"
@@ -939,9 +939,6 @@ def sys_argv_errors():
 # Target URL reload
 URL_RELOAD = False

-# Crawl the website starting from the target URL.
-DEFAULT_CRAWLDEPTH_LEVEL = 0
-
 # Command history
 CLI_HISTORY = ""