
Commit 6b1760d

rmax authored and dangra committed
Replaced __import__ with importlib.import_module.

Since Python 2.7, importlib.import_module has been the recommended way to import modules programmatically. From the __import__ documentation: "Import a module. Because this function is meant for use by the Python interpreter and not for general use it is better to use importlib.import_module() to programmatically import a module."
1 parent 2318c56 commit 6b1760d
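
For context, the fromlist trick being removed exists because __import__ with a dotted name returns the top-level package rather than the requested submodule. A minimal sketch of the difference, using the stdlib email.utils module as a stand-in for the dotted paths Scrapy imports:

    from importlib import import_module

    # __import__ with a dotted name returns the top-level package...
    top = __import__('email.utils')
    assert top.__name__ == 'email'

    # ...so the old code forced the submodule out with a non-empty fromlist:
    hacked = __import__('email.utils', {}, {}, [''])
    assert hacked.__name__ == 'email.utils'

    # import_module returns the named module directly, with no workaround:
    mod = import_module('email.utils')
    assert mod.__name__ == 'email.utils'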

6 files changed: +18 −10 lines


scrapy/commands/genspider.py

Lines changed: 3 additions & 1 deletion
@@ -2,6 +2,8 @@
 import os
 import shutil
 import string
+
+from importlib import import_module
 from os.path import join, dirname, abspath, exists, splitext
 
 import scrapy
@@ -90,7 +92,7 @@ def _genspider(self, module, name, domain, template_name, template_file):
             'classname': '%sSpider' % ''.join([s.capitalize() \
                 for s in module.split('_')])
         }
-        spiders_module = __import__(self.settings['NEWSPIDER_MODULE'], {}, {}, [''])
+        spiders_module = import_module(self.settings['NEWSPIDER_MODULE'])
         spiders_dir = abspath(dirname(spiders_module.__file__))
         spider_file = "%s.py" % join(spiders_dir, module)
         shutil.copyfile(template_file, spider_file)

scrapy/commands/runspider.py

Lines changed: 2 additions & 1 deletion
@@ -1,5 +1,6 @@
 import sys
 import os
+from importlib import import_module
 
 from scrapy.utils.spider import iter_spider_classes
 from scrapy.command import ScrapyCommand
@@ -15,7 +16,7 @@ def _import_file(filepath):
     if dirname:
         sys.path = [dirname] + sys.path
     try:
-        module = __import__(fname, {}, {}, [''])
+        module = import_module(fname)
     finally:
         if dirname:
             sys.path.pop(0)

scrapy/contrib/httpcache.py

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 from __future__ import print_function
 import os
 import cPickle as pickle
+from importlib import import_module
 from time import time
 from weakref import WeakKeyDictionary
 from email.utils import mktime_tz, parsedate_tz
@@ -164,7 +165,7 @@ class DbmCacheStorage(object):
     def __init__(self, settings):
         self.cachedir = data_path(settings['HTTPCACHE_DIR'], createdir=True)
         self.expiration_secs = settings.getint('HTTPCACHE_EXPIRATION_SECS')
-        self.dbmodule = __import__(settings['HTTPCACHE_DBM_MODULE'], {}, {}, [''])
+        self.dbmodule = import_module(settings['HTTPCACHE_DBM_MODULE'])
         self.db = None
 
     def open_spider(self, spider):
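
As context for the DbmCacheStorage change, a minimal sketch of what the constructor line now does at runtime, assuming the Python 2 stdlib module 'anydbm' as the HTTPCACHE_DBM_MODULE setting value (the path below is only illustrative):

    from importlib import import_module

    dbmodule = import_module('anydbm')  # stand-in for settings['HTTPCACHE_DBM_MODULE']
    db = dbmodule.open('/tmp/httpcache.db', 'c')  # 'c' creates the file if missing
    db['fingerprint'] = 'serialized response data'
    db.close()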

scrapy/tests/test_dependencies.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 class ScrapyUtilsTest(unittest.TestCase):
     def test_required_openssl_version(self):
         try:
-            module = __import__('OpenSSL', {}, {}, [''])
+            module = __import__('OpenSSL')
         except ImportError as ex:
             raise unittest.SkipTest("OpenSSL is not available")
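
Note that this file keeps __import__ rather than switching APIs: 'OpenSSL' is a top-level name, so __import__ already returns the module itself and only the superfluous fromlist arguments needed to go. A minimal sketch of the equivalence for undotted names:

    from importlib import import_module

    # For a top-level name the two calls return the same module object:
    assert __import__('unittest') is import_module('unittest')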

scrapy/utils/misc.py

Lines changed: 5 additions & 3 deletions
@@ -2,6 +2,8 @@
 
 import re
 import hashlib
+
+from importlib import import_module
 from pkgutil import iter_modules
 
 from w3lib.html import remove_entities
@@ -35,7 +37,7 @@ def load_object(path):
 
     module, name = path[:dot], path[dot+1:]
     try:
-        mod = __import__(module, {}, {}, [''])
+        mod = import_module(module)
     except ImportError as e:
         raise ImportError("Error loading object '%s': %s" % (path, e))
 
@@ -55,15 +57,15 @@ def walk_modules(path, load=False):
     """
 
     mods = []
-    mod = __import__(path, {}, {}, [''])
+    mod = import_module(path)
     mods.append(mod)
     if hasattr(mod, '__path__'):
         for _, subpath, ispkg in iter_modules(mod.__path__):
             fullpath = path + '.' + subpath
             if ispkg:
                 mods += walk_modules(fullpath)
             else:
-                submod = __import__(fullpath, {}, {}, [''])
+                submod = import_module(fullpath)
                 mods.append(submod)
     return mods
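
As a usage sketch (the package name here is just a stand-in), walk_modules with import_module resolves dotted paths directly, returning the package followed by every submodule and recursing into subpackages:

    from scrapy.utils.misc import walk_modules

    for mod in walk_modules('scrapy.commands'):
        print(mod.__name__)  # e.g. scrapy.commands, scrapy.commands.genspider, ...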

scrapy/utils/project.py

Lines changed: 5 additions & 3 deletions
@@ -1,8 +1,10 @@
 import os
-from os.path import join, dirname, abspath, isabs, exists
 import cPickle as pickle
 import warnings
 
+from importlib import import_module
+from os.path import join, dirname, abspath, isabs, exists
+
 from scrapy.utils.conf import closest_scrapy_cfg, get_config, init_env
 from scrapy.settings import CrawlerSettings
 from scrapy.exceptions import NotConfigured
@@ -14,7 +16,7 @@ def inside_project():
     scrapy_module = os.environ.get('SCRAPY_SETTINGS_MODULE')
     if scrapy_module is not None:
         try:
-            __import__(scrapy_module)
+            import_module(scrapy_module)
         except ImportError as exc:
             warnings.warn("Cannot import scrapy settings module %s: %s" % (scrapy_module, exc))
     else:
@@ -53,7 +55,7 @@ def get_project_settings():
         init_env(project)
     settings_module_path = os.environ.get(ENVVAR)
     if settings_module_path:
-        settings_module = __import__(settings_module_path, {}, {}, [''])
+        settings_module = import_module(settings_module_path)
     else:
         settings_module = None
     settings = CrawlerSettings(settings_module)
