Skip to content

Commit 6b35166

Browse files
Converted map() calls to list comprehensions or generator expressions for Python 3 compatibility
1 parent 911c808 commit 6b35166

File tree

7 files changed

+17
-13
lines changed

7 files changed

+17
-13
lines changed

scrapy/contrib/downloadermiddleware/cookies.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ def _get_request_cookies(self, jar, request):
8181
else:
8282
cookie_list = request.cookies
8383

84-
cookies = map(self._format_cookie, cookie_list)
84+
cookies = [self._format_cookie(x) for x in cookie_list]
8585
headers = {'Set-Cookie': cookies}
8686
response = Response(request.url, headers=headers)
8787

scrapy/contrib/httpcache.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ class DummyPolicy(object):
1515

1616
def __init__(self, settings):
1717
self.ignore_schemes = settings.getlist('HTTPCACHE_IGNORE_SCHEMES')
18-
self.ignore_http_codes = map(int, settings.getlist('HTTPCACHE_IGNORE_HTTP_CODES'))
18+
self.ignore_http_codes = [int(x) for x in settings.getlist('HTTPCACHE_IGNORE_HTTP_CODES')]
1919

2020
def should_cache_request(self, request):
2121
return urlparse_cached(request).scheme not in self.ignore_schemes

scrapy/telnet.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ def __init__(self, crawler):
3434
raise NotConfigured
3535
self.crawler = crawler
3636
self.noisy = False
37-
self.portrange = map(int, crawler.settings.getlist('TELNETCONSOLE_PORT'))
37+
self.portrange = [int(x) for x in crawler.settings.getlist('TELNETCONSOLE_PORT')]
3838
self.host = crawler.settings['TELNETCONSOLE_HOST']
3939
self.crawler.signals.connect(self.start_listening, signals.engine_started)
4040
self.crawler.signals.connect(self.stop_listening, signals.engine_stopped)

scrapy/tests/test_contracts.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def test_contracts(self):
9191
# extract contracts correctly
9292
contracts = self.conman.extract_contracts(spider.returns_request)
9393
self.assertEqual(len(contracts), 2)
94-
self.assertEqual(frozenset(map(type, contracts)),
94+
self.assertEqual(frozenset(type(x) for x in contracts),
9595
frozenset([UrlContract, ReturnsContract]))
9696

9797
# returns request for valid method
@@ -109,13 +109,13 @@ def test_returns(self):
109109
# returns_item
110110
request = self.conman.from_method(spider.returns_item, self.results)
111111
output = request.callback(response)
112-
self.assertEqual(map(type, output), [TestItem])
112+
self.assertEqual([type(x) for x in output], [TestItem])
113113
self.should_succeed()
114114

115115
# returns_request
116116
request = self.conman.from_method(spider.returns_request, self.results)
117117
output = request.callback(response)
118-
self.assertEqual(map(type, output), [Request])
118+
self.assertEqual([type(x) for x in output], [Request])
119119
self.should_succeed()
120120

121121
# returns_fail
@@ -130,7 +130,7 @@ def test_scrapes(self):
130130
# scrapes_item_ok
131131
request = self.conman.from_method(spider.scrapes_item_ok, self.results)
132132
output = request.callback(response)
133-
self.assertEqual(map(type, output), [TestItem])
133+
self.assertEqual([type(x) for x in output], [TestItem])
134134
self.should_succeed()
135135

136136
# scrapes_item_fail

scrapy/tests/test_dependencies.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ def test_required_openssl_version(self):
88
raise unittest.SkipTest("OpenSSL is not available")
99

1010
if hasattr(module, '__version__'):
11-
installed_version = map(int, module.__version__.split('.')[:2])
11+
installed_version = [int(x) for x in module.__version__.split('.')[:2]]
1212
assert installed_version >= [0, 6], "OpenSSL >= 0.6 required"
1313

1414
if __name__ == "__main__":

scrapy/tests/test_pipeline_files.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -63,12 +63,14 @@ def test_file_not_expired(self):
6363
mock.patch.object(FilesPipeline, 'get_media_requests',
6464
return_value=[_prepare_request_object(item_url)])
6565
]
66-
map(lambda p: p.start(), patchers)
66+
for p in patchers:
67+
p.start()
6768

6869
result = yield self.pipeline.process_item(item, None)
6970
self.assertEqual(result['files'][0]['checksum'], 'abc')
7071

71-
map(lambda p: p.stop(), patchers)
72+
for p in patchers:
73+
p.stop()
7274

7375
@defer.inlineCallbacks
7476
def test_file_expired(self):
@@ -82,12 +84,14 @@ def test_file_expired(self):
8284
return_value=[_prepare_request_object(item_url)]),
8385
mock.patch.object(FilesPipeline, 'inc_stats', return_value=True)
8486
]
85-
map(lambda p: p.start(), patchers)
87+
for p in patchers:
88+
p.start()
8689

8790
result = yield self.pipeline.process_item(item, None)
8891
self.assertNotEqual(result['files'][0]['checksum'], 'abc')
8992

90-
map(lambda p: p.stop(), patchers)
93+
for p in patchers:
94+
p.stop()
9195

9296
class FilesPipelineTestCaseFields(unittest.TestCase):
9397

scrapy/webservice.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ def __init__(self, crawler):
6969
raise NotConfigured
7070
self.crawler = crawler
7171
logfile = crawler.settings['WEBSERVICE_LOGFILE']
72-
self.portrange = map(int, crawler.settings.getlist('WEBSERVICE_PORT'))
72+
self.portrange = [int(x) for x in crawler.settings.getlist('WEBSERVICE_PORT')]
7373
self.host = crawler.settings['WEBSERVICE_HOST']
7474
root = RootResource(crawler)
7575
reslist = build_component_list(crawler.settings['WEBSERVICE_RESOURCES_BASE'], \

0 commit comments

Comments (0)