Skip to content

Commit b78e761

Browse files
committed
Merge pull request scrapy#445 from darkrho/import-module
Use `importlib.import_module` instead of `__import__`
2 parents 2318c56 + d1b9128 commit b78e761

File tree

9 files changed

+26
-13
lines changed

9 files changed

+26
-13
lines changed

scrapy/commands/genspider.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
import os
33
import shutil
44
import string
5+
6+
from importlib import import_module
57
from os.path import join, dirname, abspath, exists, splitext
68

79
import scrapy
@@ -90,7 +92,7 @@ def _genspider(self, module, name, domain, template_name, template_file):
9092
'classname': '%sSpider' % ''.join([s.capitalize() \
9193
for s in module.split('_')])
9294
}
93-
spiders_module = __import__(self.settings['NEWSPIDER_MODULE'], {}, {}, [''])
95+
spiders_module = import_module(self.settings['NEWSPIDER_MODULE'])
9496
spiders_dir = abspath(dirname(spiders_module.__file__))
9597
spider_file = "%s.py" % join(spiders_dir, module)
9698
shutil.copyfile(template_file, spider_file)

scrapy/commands/runspider.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import sys
22
import os
3+
from importlib import import_module
34

45
from scrapy.utils.spider import iter_spider_classes
56
from scrapy.command import ScrapyCommand
@@ -15,7 +16,7 @@ def _import_file(filepath):
1516
if dirname:
1617
sys.path = [dirname] + sys.path
1718
try:
18-
module = __import__(fname, {}, {}, [''])
19+
module = import_module(fname)
1920
finally:
2021
if dirname:
2122
sys.path.pop(0)

scrapy/contrib/httpcache.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from __future__ import print_function
22
import os
33
import cPickle as pickle
4+
from importlib import import_module
45
from time import time
56
from weakref import WeakKeyDictionary
67
from email.utils import mktime_tz, parsedate_tz
@@ -164,7 +165,7 @@ class DbmCacheStorage(object):
164165
def __init__(self, settings):
165166
self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
166167
self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
167-
self.dbmodule = __import__(settings['HTTPCACHE_DBM_MODULE'], {}, {}, [''])
168+
self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
168169
self.db = None
169170

170171
def open_spider(self, spider):

scrapy/contrib/memusage.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
import socket
88
from pprint import pformat
9+
from importlib import import_module
910

1011
from twisted.internet import task
1112

@@ -20,7 +21,8 @@ def __init__(self, crawler):
2021
if not crawler.settings.getbool('MEMUSAGE_ENABLED'):
2122
raise NotConfigured
2223
try:
23-
self.resource = __import__('resource')
24+
# stdlib's resource module is only available on unix platforms.
25+
self.resource = import_module('resource')
2426
except ImportError:
2527
raise NotConfigured
2628

scrapy/settings/default_settings.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515

1616
import os
1717
import sys
18+
from importlib import import_module
1819
from os.path import join, abspath, dirname
1920

2021
BOT_NAME = 'scrapybot'
@@ -229,7 +230,7 @@
229230

230231
URLLENGTH_LIMIT = 2083
231232

232-
USER_AGENT = 'Scrapy/%s (+http://scrapy.org)' % __import__('scrapy').__version__
233+
USER_AGENT = 'Scrapy/%s (+http://scrapy.org)' % import_module('scrapy').__version__
233234

234235
TELNETCONSOLE_ENABLED = 1
235236
TELNETCONSOLE_PORT = [6023, 6073]

scrapy/tests/test_dependencies.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1+
from importlib import import_module
12
from twisted.trial import unittest
23

34
class ScrapyUtilsTest(unittest.TestCase):
45
def test_required_openssl_version(self):
56
try:
6-
module = __import__('OpenSSL', {}, {}, [''])
7+
module = import_module('OpenSSL')
78
except ImportError as ex:
89
raise unittest.SkipTest("OpenSSL is not available")
910

scrapy/utils/misc.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22

33
import re
44
import hashlib
5+
6+
from importlib import import_module
57
from pkgutil import iter_modules
68

79
from w3lib.html import remove_entities
@@ -35,7 +37,7 @@ def load_object(path):
3537

3638
module, name = path[:dot], path[dot+1:]
3739
try:
38-
mod = __import__(module, {}, {}, [''])
40+
mod = import_module(module)
3941
except ImportError as e:
4042
raise ImportError("Error loading object '%s': %s" % (path, e))
4143

@@ -55,15 +57,15 @@ def walk_modules(path, load=False):
5557
"""
5658

5759
mods = []
58-
mod = __import__(path, {}, {}, [''])
60+
mod = import_module(path)
5961
mods.append(mod)
6062
if hasattr(mod, '__path__'):
6163
for _, subpath, ispkg in iter_modules(mod.__path__):
6264
fullpath = path + '.' + subpath
6365
if ispkg:
6466
mods += walk_modules(fullpath)
6567
else:
66-
submod = __import__(fullpath, {}, {}, [''])
68+
submod = import_module(fullpath)
6769
mods.append(submod)
6870
return mods
6971

scrapy/utils/project.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import os
2-
from os.path import join, dirname, abspath, isabs, exists
32
import cPickle as pickle
43
import warnings
54

5+
from importlib import import_module
6+
from os.path import join, dirname, abspath, isabs, exists
7+
68
from scrapy.utils.conf import closest_scrapy_cfg, get_config, init_env
79
from scrapy.settings import CrawlerSettings
810
from scrapy.exceptions import NotConfigured
@@ -14,7 +16,7 @@ def inside_project():
1416
scrapy_module = os.environ.get('SCRAPY_SETTINGS_MODULE')
1517
if scrapy_module is not None:
1618
try:
17-
__import__(scrapy_module)
19+
import_module(scrapy_module)
1820
except ImportError as exc:
1921
warnings.warn("Cannot import scrapy settings module %s: %s" % (scrapy_module, exc))
2022
else:
@@ -53,7 +55,7 @@ def get_project_settings():
5355
init_env(project)
5456
settings_module_path = os.environ.get(ENVVAR)
5557
if settings_module_path:
56-
settings_module = __import__(settings_module_path, {}, {}, [''])
58+
settings_module = import_module(settings_module_path)
5759
else:
5860
settings_module = None
5961
settings = CrawlerSettings(settings_module)

scrapy/utils/test.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
import os
66

7+
from importlib import import_module
78
from twisted.trial.unittest import SkipTest
89

910

@@ -39,7 +40,7 @@ class SettingsModuleMock(object):
3940
def get_pythonpath():
4041
"""Return a PYTHONPATH suitable to use in processes so that they find this
4142
installation of Scrapy"""
42-
scrapy_path = __import__('scrapy').__path__[0]
43+
scrapy_path = import_module('scrapy').__path__[0]
4344
return os.path.dirname(scrapy_path) + os.pathsep + os.environ.get('PYTHONPATH', '')
4445

4546
def get_testenv():

0 commit comments

Comments
 (0)