
Commit d381a35

Python 3 compatible syntax: print, except, raise, octal numbers; removed Python 2.2 boolean compatibility code in xlib/pydispatch/dispatcher.py
1 parent 2e8cc28 commit d381a35

Some content is hidden: large commits have some files collapsed by default, so not all 46 changed files appear below.

46 files changed: +123 −129 lines
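For context, here is a minimal sketch (not code from this commit; the function and variable names are illustrative only) of the syntax migrations the commit applies: print statements become print() calls, "except X, e" becomes "except X as e", "raise X, msg" becomes a call to the exception class, and octal literals gain the 0o prefix. With the __future__ import, the same source runs on Python 2.6+ as well as Python 3.

from __future__ import print_function  # lets the print() form run on Python 2.6+ too


def report(stats):
    # Python 2 only:  print "%-40s %s" % (name, value)
    for name, value in stats.items():
        print("%-40s %s" % (name, value))      # valid on 2.6+ and 3.x


def parse_mode(text):
    try:
        mode = int(text, 8)
    # Python 2 only:  except ValueError, e:
    except ValueError as e:                    # 'as' form works on 2.6+ and 3.x
        # Python 2 only:  raise ValueError, "bad mode: %s" % e
        raise ValueError("bad mode: %s" % e)   # call the exception class instead
    # Python 2 only octal literal: 0755
    return mode & 0o755                        # 0o prefix works on 2.6+ and 3.x


if __name__ == '__main__':
    report({'downloader/request_count': 42})
    print(parse_mode('644'))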

extras/scrapy-ws.py

Lines changed: 12 additions & 12 deletions
@@ -29,9 +29,9 @@ def get_commands():
 
 def cmd_help(args, opts):
     """help - list available commands"""
-    print "Available commands:"
+    print("Available commands:")
     for _, func in sorted(get_commands().items()):
-        print " ", func.__doc__
+        print(" ", func.__doc__)
 
 def cmd_stop(args, opts):
     """stop <spider> - stop a running spider"""
@@ -40,29 +40,29 @@ def cmd_stop(args, opts):
 def cmd_list_running(args, opts):
     """list-running - list running spiders"""
     for x in json_get(opts, 'crawler/engine/open_spiders'):
-        print x
+        print(x)
 
 def cmd_list_available(args, opts):
     """list-available - list name of available spiders"""
     for x in jsonrpc_call(opts, 'crawler/spiders', 'list'):
-        print x
+        print(x)
 
 def cmd_list_resources(args, opts):
     """list-resources - list available web service resources"""
     for x in json_get(opts, '')['resources']:
-        print x
+        print(x)
 
 def cmd_get_spider_stats(args, opts):
     """get-spider-stats <spider> - get stats of a running spider"""
     stats = jsonrpc_call(opts, 'stats', 'get_stats', args[0])
     for name, value in stats.items():
-        print "%-40s %s" % (name, value)
+        print("%-40s %s" % (name, value))
 
 def cmd_get_global_stats(args, opts):
     """get-global-stats - get global stats"""
     stats = jsonrpc_call(opts, 'stats', 'get_stats')
     for name, value in stats.items():
-        print "%-40s %s" % (name, value)
+        print("%-40s %s" % (name, value))
 
 def get_wsurl(opts, path):
     return urljoin("http://%s:%s/"% (opts.host, opts.port), path)
@@ -101,12 +101,12 @@ def main():
     try:
         cmd(args, opts)
     except IndexError:
-        print cmd.__doc__
-    except JsonRpcError, e:
-        print str(e)
+        print(cmd.__doc__)
+    except JsonRpcError as e:
+        print(str(e))
         if e.data:
-            print "Server Traceback below:"
-            print e.data
+            print("Server Traceback below:")
+            print(e.data)
 
 
 if __name__ == '__main__':

scrapy/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
 import sys, os, warnings
 
 if sys.version_info < (2, 6):
-    print "Scrapy %s requires Python 2.6 or above" % __version__
+    print("Scrapy %s requires Python 2.6 or above" % __version__)
     sys.exit(1)
 
 # ignore noisy twisted deprecation warnings

scrapy/cmdline.py

Lines changed: 14 additions & 14 deletions
@@ -59,34 +59,34 @@ def _pop_command_name(argv):
 
 def _print_header(settings, inproject):
     if inproject:
-        print "Scrapy %s - project: %s\n" % (scrapy.__version__, \
-            settings['BOT_NAME'])
+        print("Scrapy %s - project: %s\n" % (scrapy.__version__, \
+            settings['BOT_NAME']))
     else:
-        print "Scrapy %s - no active project\n" % scrapy.__version__
+        print("Scrapy %s - no active project\n" % scrapy.__version__)
 
 def _print_commands(settings, inproject):
     _print_header(settings, inproject)
-    print "Usage:"
-    print " scrapy <command> [options] [args]\n"
-    print "Available commands:"
+    print("Usage:")
+    print(" scrapy <command> [options] [args]\n")
+    print("Available commands:")
     cmds = _get_commands_dict(settings, inproject)
     for cmdname, cmdclass in sorted(cmds.iteritems()):
-        print " %-13s %s" % (cmdname, cmdclass.short_desc())
+        print(" %-13s %s" % (cmdname, cmdclass.short_desc()))
     if not inproject:
-        print
-        print " [ more ] More commands available when run from project directory"
-    print
-    print 'Use "scrapy <command> -h" to see more info about a command'
+        print()
+        print(" [ more ] More commands available when run from project directory")
+    print()
+    print('Use "scrapy <command> -h" to see more info about a command')
 
 def _print_unknown_command(settings, cmdname, inproject):
     _print_header(settings, inproject)
-    print "Unknown command: %s\n" % cmdname
-    print 'Use "scrapy" to see available commands'
+    print("Unknown command: %s\n" % cmdname)
+    print('Use "scrapy" to see available commands')
 
 def _run_print_help(parser, func, *a, **kw):
     try:
         func(*a, **kw)
-    except UsageError, e:
+    except UsageError as e:
         if str(e):
             parser.error(str(e))
         if e.print_help:

scrapy/commands/check.py

Lines changed: 2 additions & 2 deletions
@@ -64,9 +64,9 @@ def run(self, args, opts):
         # start checks
         if opts.list:
             for spider, methods in sorted(contract_reqs.iteritems()):
-                print spider
+                print(spider)
                 for method in sorted(methods):
-                    print ' * %s' % method
+                    print(' * %s' % method)
         else:
             self.crawler_process.start()
             self.results.printErrors()

scrapy/commands/deploy.py

Lines changed: 6 additions & 6 deletions
@@ -72,7 +72,7 @@ def run(self, args, opts):
 
         if opts.list_targets:
             for name, target in _get_targets().items():
-                print "%-20s %s" % (name, target['url'])
+                print("%-20s %s" % (name, target['url']))
             return
 
         if opts.list_projects:
@@ -81,7 +81,7 @@ def run(self, args, opts):
            _add_auth_header(req, target)
            f = urllib2.urlopen(req)
            projects = json.loads(f.read())['projects']
-           print os.linesep.join(projects)
+           print(os.linesep.join(projects))
            return
 
        tmpdir = None
@@ -208,12 +208,12 @@ def _http_post(request):
    try:
        f = urllib2.urlopen(request)
        _log("Server response (%s):" % f.code)
-       print f.read()
+       print(f.read())
        return True
-   except urllib2.HTTPError, e:
+   except urllib2.HTTPError as e:
        _log("Deploy failed (%s):" % e.code)
-       print e.read()
-   except urllib2.URLError, e:
+       print(e.read())
+   except urllib2.URLError as e:
        _log("Deploy failed: %s" % e)
 
 def _build_egg():

scrapy/commands/fetch.py

Lines changed: 3 additions & 3 deletions
@@ -30,15 +30,15 @@ def add_options(self, parser):
     def _print_headers(self, headers, prefix):
         for key, values in headers.items():
             for value in values:
-                print '%s %s: %s' % (prefix, key, value)
+                print('%s %s: %s' % (prefix, key, value))
 
     def _print_response(self, response, opts):
         if opts.headers:
             self._print_headers(response.request.headers, '>')
-            print '>'
+            print('>')
             self._print_headers(response.headers, '<')
         else:
-            print response.body
+            print(response.body)
 
     def run(self, args, opts):
         if len(args) != 1 or not is_url(args[0]):

scrapy/commands/genspider.py

Lines changed: 11 additions & 11 deletions
@@ -49,7 +49,7 @@ def run(self, args, opts):
         if opts.dump:
             template_file = self._find_template(opts.dump)
             if template_file:
-                print open(template_file, 'r').read()
+                print(open(template_file, 'r').read())
             return
         if len(args) != 2:
             raise UsageError()
@@ -58,7 +58,7 @@ def run(self, args, opts):
         module = sanitize_module_name(name)
 
         if self.settings.get('BOT_NAME') == module:
-            print "Cannot create a spider with the same name as your project"
+            print("Cannot create a spider with the same name as your project")
             return
 
         try:
@@ -69,8 +69,8 @@ def run(self, args, opts):
         else:
             # if spider already exists and not --force then halt
             if not opts.force:
-                print "Spider %r already exists in module:" % name
-                print " %s" % spider.__module__
+                print("Spider %r already exists in module:" % name)
+                print(" %s" % spider.__module__)
                 return
         template_file = self._find_template(opts.template)
         if template_file:
@@ -94,22 +94,22 @@ def _genspider(self, module, name, domain, template_name, template_file):
         spider_file = "%s.py" % join(spiders_dir, module)
         shutil.copyfile(template_file, spider_file)
         render_templatefile(spider_file, **tvars)
-        print "Created spider %r using template %r in module:" % (name, \
-            template_name)
-        print " %s.%s" % (spiders_module.__name__, module)
+        print("Created spider %r using template %r in module:" % (name, \
+            template_name))
+        print(" %s.%s" % (spiders_module.__name__, module))
 
     def _find_template(self, template):
         template_file = join(self.templates_dir, '%s.tmpl' % template)
         if exists(template_file):
             return template_file
-        print "Unable to find template: %s\n" % template
-        print 'Use "scrapy genspider --list" to see all available templates.'
+        print("Unable to find template: %s\n" % template)
+        print('Use "scrapy genspider --list" to see all available templates.')
 
     def _list_templates(self):
-        print "Available templates:"
+        print("Available templates:")
         for filename in sorted(os.listdir(self.templates_dir)):
             if filename.endswith('.tmpl'):
-                print " %s" % splitext(filename)[0]
+                print(" %s" % splitext(filename)[0])
 
     @property
     def templates_dir(self):

scrapy/commands/list.py

Lines changed: 1 addition & 1 deletion
@@ -11,4 +11,4 @@ def short_desc(self):
     def run(self, args, opts):
         crawler = self.crawler_process.create_crawler()
         for s in crawler.spiders.list():
-            print s
+            print(s)

scrapy/commands/parse.py

Lines changed: 4 additions & 4 deletions
@@ -68,7 +68,7 @@ def print_items(self, lvl=None, colour=True):
         else:
             items = self.items.get(lvl, [])
 
-        print "# Scraped Items ", "-"*60
+        print("# Scraped Items ", "-"*60)
         display.pprint([dict(x) for x in items], colorize=colour)
 
     def print_requests(self, lvl=None, colour=True):
@@ -81,21 +81,21 @@ def print_requests(self, lvl=None, colour=True):
         else:
             requests = self.requests.get(lvl, [])
 
-        print "# Requests ", "-"*65
+        print("# Requests ", "-"*65)
         display.pprint(requests, colorize=colour)
 
     def print_results(self, opts):
         colour = not opts.nocolour
 
         if opts.verbose:
             for level in xrange(1, self.max_level+1):
-                print '\n>>> DEPTH LEVEL: %s <<<' % level
+                print('\n>>> DEPTH LEVEL: %s <<<' % level)
                 if not opts.noitems:
                     self.print_items(level, colour)
                 if not opts.nolinks:
                     self.print_requests(level, colour)
         else:
-            print '\n>>> STATUS DEPTH LEVEL %s <<<' % self.max_level
+            print('\n>>> STATUS DEPTH LEVEL %s <<<' % self.max_level)
             if not opts.noitems:
                 self.print_items(colour=colour)
             if not opts.nolinks:

scrapy/commands/runspider.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ def run(self, args, opts):
             raise UsageError("File not found: %s\n" % filename)
         try:
             module = _import_file(filename)
-        except (ImportError, ValueError), e:
+        except (ImportError, ValueError) as e:
            raise UsageError("Unable to load %r: %s\n" % (filename, e))
         spclasses = list(iter_spider_classes(module))
         if not spclasses:

scrapy/commands/settings.py

Lines changed: 5 additions & 5 deletions
@@ -27,12 +27,12 @@ def add_options(self, parser):
     def run(self, args, opts):
         settings = self.crawler_process.settings
         if opts.get:
-            print settings.get(opts.get)
+            print(settings.get(opts.get))
         elif opts.getbool:
-            print settings.getbool(opts.getbool)
+            print(settings.getbool(opts.getbool))
         elif opts.getint:
-            print settings.getint(opts.getint)
+            print(settings.getint(opts.getint))
         elif opts.getfloat:
-            print settings.getfloat(opts.getfloat)
+            print(settings.getfloat(opts.getfloat))
         elif opts.getlist:
-            print settings.getlist(opts.getlist)
+            print(settings.getlist(opts.getlist))

scrapy/commands/startproject.py

Lines changed: 3 additions & 3 deletions
@@ -36,11 +36,11 @@ def run(self, args, opts):
             raise UsageError()
         project_name = args[0]
         if not re.search(r'^[_a-zA-Z]\w*$', project_name):
-            print 'Error: Project names must begin with a letter and contain only\n' \
-                'letters, numbers and underscores'
+            print('Error: Project names must begin with a letter and contain only\n' \
+                'letters, numbers and underscores')
             sys.exit(1)
         elif exists(project_name):
-            print "Error: directory %r already exists" % project_name
+            print("Error: directory %r already exists" % project_name)
             sys.exit(1)
 
         moduletpl = join(TEMPLATES_PATH, 'module')

scrapy/commands/version.py

Lines changed: 7 additions & 7 deletions
@@ -25,11 +25,11 @@ def run(self, args, opts):
             import lxml.etree
             lxml_version = ".".join(map(str, lxml.etree.LXML_VERSION))
             libxml2_version = ".".join(map(str, lxml.etree.LIBXML_VERSION))
-            print "Scrapy : %s" % scrapy.__version__
-            print "lxml : %s" % lxml_version
-            print "libxml2 : %s" % libxml2_version
-            print "Twisted : %s" % twisted.version.short()
-            print "Python : %s" % sys.version.replace("\n", "- ")
-            print "Platform: %s" % platform.platform()
+            print("Scrapy : %s" % scrapy.__version__)
+            print("lxml : %s" % lxml_version)
+            print("libxml2 : %s" % libxml2_version)
+            print("Twisted : %s" % twisted.version.short())
+            print("Python : %s" % sys.version.replace("\n", "- "))
+            print("Platform: %s" % platform.platform())
         else:
-            print "Scrapy %s" % scrapy.__version__
+            print("Scrapy %s" % scrapy.__version__)

scrapy/contrib/djangoitem.py

Lines changed: 2 additions & 2 deletions
@@ -51,12 +51,12 @@ def _get_errors(self, exclude=None):
 
         try:
             self.instance.clean_fields(exclude=exclude)
-        except ValidationError, e:
+        except ValidationError as e:
            self._errors = e.update_error_dict(self._errors)
 
         try:
             self.instance.clean()
-        except ValidationError, e:
+        except ValidationError as e:
            self._errors = e.update_error_dict(self._errors)
 
         # uniqueness is not checked, because it is faster to check it when

scrapy/contrib/loader/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def get_output_value(self, field_name):
         proc = wrap_loader_context(proc, self.context)
         try:
             return proc(self._values[field_name])
-        except Exception, e:
+        except Exception as e:
            raise ValueError("Error with output processor: field=%r value=%r error='%s: %s'" % \
                (field_name, self._values[field_name], type(e).__name__, str(e)))
 
scrapy/core/downloader/handlers/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ def __init__(self, crawler):
             cls = load_object(clspath)
             try:
                 dh = cls(crawler.settings)
-            except NotConfigured, ex:
+            except NotConfigured as ex:
                self._notconfigured[scheme] = str(ex)
             else:
                 self._handlers[scheme] = dh

scrapy/core/downloader/handlers/s3.py

Lines changed: 1 addition & 1 deletion
@@ -41,7 +41,7 @@ def __init__(self, settings, aws_access_key_id=None, aws_secret_access_key=None,
 
         try:
             self.conn = _S3Connection(aws_access_key_id, aws_secret_access_key)
-        except Exception, ex:
+        except Exception as ex:
            raise NotConfigured(str(ex))
         self._download_http = httpdownloadhandler(settings).download_request
 

scrapy/core/engine.py

Lines changed: 1 addition & 1 deletion
@@ -111,7 +111,7 @@ def _next_request(self, spider):
                 request = slot.start_requests.next()
             except StopIteration:
                 slot.start_requests = None
-            except Exception, exc:
+            except Exception as exc:
                log.err(None, 'Obtaining request from start requests', \
                    spider=spider)
             else:
