Skip to content

Commit fa1c25c

Browse files
committed
Merge pull request scrapy#1286 from scrapy/configure_logging
configure_logging: change the meaning of settings=None
2 parents d862f5a + 36bc912 commit fa1c25c

File tree

3 files changed

+66
-56
lines changed

3 files changed

+66
-56
lines changed

docs/topics/logging.rst

Lines changed: 16 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -194,42 +194,33 @@ scrapy.utils.log module
194194
.. module:: scrapy.utils.log
195195
:synopsis: Logging utils
196196

197-
.. function:: configure_logging(settings=None)
197+
.. autofunction:: configure_logging
198198

199-
This function initializes logging defaults for Scrapy.
200-
201-
It's automatically called when using Scrapy commands, but needs to be
202-
called explicitly when running custom scripts. In that case, its usage is
203-
not required but it's recommended.
204-
205-
This function does:
206-
- Route warnings and Twisted logging through Python standard logging
207-
- Set a filter on Scrapy logger for formatting Twisted failures
208-
- Assign DEBUG and ERROR levels to Scrapy and Twisted loggers
209-
respectively
210-
211-
If `settings` is not ``None``, it will also create a root handler based on
212-
the settings listed in :ref:`topics-logging-settings`.
199+
``configure_logging`` is automatically called when using Scrapy commands,
200+
but needs to be called explicitly when running custom scripts. In that
201+
case, its usage is not required but it's recommended.
213202

214203
If you plan on configuring the handlers yourself, it is still recommended that you
215-
call this function, keeping `settings` as ``None``. Bear in mind there
216-
won't be any log output set by default in that case.
204+
call this function, passing `install_root_handler=False`. Bear in mind
205+
there won't be any log output set by default in that case.
217206

218207
To get you started on manually configuring logging's output, you can use
219-
`logging.basicConfig()`_ to set a basic root handler. This is an example on
220-
how to redirect ``INFO`` or higher messages to a file::
208+
`logging.basicConfig()`_ to set a basic root handler. This is an example
209+
on how to redirect ``INFO`` or higher messages to a file::
221210

222211
import logging
223212
from scrapy.utils.log import configure_logging
224213

225-
configure_logging() # Note we aren't providing settings in this case
226-
logging.basicConfig(filename='log.txt', format='%(levelname)s: %(message)s', level=logging.INFO)
214+
configure_logging(install_root_handler=False)
215+
logging.basicConfig(
216+
filename='log.txt',
217+
format='%(levelname)s: %(message)s',
218+
level=logging.INFO
219+
)
227220

228221
Refer to :ref:`run-from-script` for more details about using Scrapy this
229222
way.
230223

231-
:param settings: settings used to create and configure a handler for the
232-
root logger.
233-
:type settings: :class:`~scrapy.settings.Settings` object or ``None``
234-
235224
.. _logging.basicConfig(): https://docs.python.org/2/library/logging.html#logging.basicConfig
225+
226+

docs/topics/practices.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ Same example using :class:`~scrapy.crawler.CrawlerRunner`:
148148
# Your second spider definition
149149
...
150150

151-
configure_logging({})
151+
configure_logging()
152152
runner = CrawlerRunner()
153153
runner.crawl(MySpider1)
154154
runner.crawl(MySpider2)
@@ -173,7 +173,7 @@ Same example but running the spiders sequentially by chaining the deferreds:
173173
# Your second spider definition
174174
...
175175

176-
configure_logging({})
176+
configure_logging()
177177
runner = CrawlerRunner()
178178

179179
@defer.inlineCallbacks

scrapy/utils/log.py

Lines changed: 48 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -56,14 +56,29 @@ def filter(self, record):
5656
}
5757

5858

59-
def configure_logging(settings=None):
60-
"""Initialize and configure default loggers
59+
def configure_logging(settings=None, install_root_handler=True):
60+
"""
61+
Initialize logging defaults for Scrapy.
62+
63+
:param settings: settings used to create and configure a handler for the
64+
root logger (default: None).
65+
:type settings: dict, :class:`~scrapy.settings.Settings` object or ``None``
66+
67+
:param install_root_handler: whether to install root logging handler
68+
(default: True)
69+
:type install_root_handler: bool
6170
6271
This function does:
63-
- Route warnings and twisted logging through Python standard logging
64-
- Set FailureFormatter filter on Scrapy logger
65-
- Assign DEBUG and ERROR level to Scrapy and Twisted loggers respectively
66-
- Create a handler for the root logger according to given settings
72+
73+
- Route warnings and twisted logging through Python standard logging
74+
- Assign DEBUG and ERROR level to Scrapy and Twisted loggers respectively
75+
- Route stdout to log if LOG_STDOUT setting is True
76+
77+
When ``install_root_handler`` is True (default), this function also
78+
creates a handler for the root logger according to given settings
79+
(see :ref:`topics-logging-settings`). You can override default options
80+
using the ``settings`` argument. When ``settings`` is empty or None, defaults
81+
are used.
6782
"""
6883
if not sys.warnoptions:
6984
# Route warnings through python logging
@@ -74,35 +89,39 @@ def configure_logging(settings=None):
7489

7590
dictConfig(DEFAULT_LOGGING)
7691

77-
if isinstance(settings, dict):
92+
if isinstance(settings, dict) or settings is None:
7893
settings = Settings(settings)
7994

80-
if settings:
81-
logging.root.setLevel(logging.NOTSET)
95+
if settings.getbool('LOG_STDOUT'):
96+
sys.stdout = StreamLogger(logging.getLogger('stdout'))
8297

83-
if settings.getbool('LOG_STDOUT'):
84-
sys.stdout = StreamLogger(logging.getLogger('stdout'))
85-
86-
# Set up the default log handler
87-
filename = settings.get('LOG_FILE')
88-
if filename:
89-
encoding = settings.get('LOG_ENCODING')
90-
handler = logging.FileHandler(filename, encoding=encoding)
91-
elif settings.getbool('LOG_ENABLED'):
92-
handler = logging.StreamHandler()
93-
else:
94-
handler = logging.NullHandler()
95-
96-
formatter = logging.Formatter(
97-
fmt=settings.get('LOG_FORMAT'),
98-
datefmt=settings.get('LOG_DATEFORMAT')
99-
)
100-
handler.setFormatter(formatter)
101-
handler.setLevel(settings.get('LOG_LEVEL'))
102-
handler.addFilter(TopLevelFormatter(['scrapy']))
98+
if install_root_handler:
99+
logging.root.setLevel(logging.NOTSET)
100+
handler = _get_handler(settings)
103101
logging.root.addHandler(handler)
104102

105103

104+
def _get_handler(settings):
105+
""" Return a log handler object according to settings """
106+
filename = settings.get('LOG_FILE')
107+
if filename:
108+
encoding = settings.get('LOG_ENCODING')
109+
handler = logging.FileHandler(filename, encoding=encoding)
110+
elif settings.getbool('LOG_ENABLED'):
111+
handler = logging.StreamHandler()
112+
else:
113+
handler = logging.NullHandler()
114+
115+
formatter = logging.Formatter(
116+
fmt=settings.get('LOG_FORMAT'),
117+
datefmt=settings.get('LOG_DATEFORMAT')
118+
)
119+
handler.setFormatter(formatter)
120+
handler.setLevel(settings.get('LOG_LEVEL'))
121+
handler.addFilter(TopLevelFormatter(['scrapy']))
122+
return handler
123+
124+
106125
def log_scrapy_info(settings):
107126
logger.info("Scrapy %(version)s started (bot: %(bot)s)",
108127
{'version': scrapy.__version__, 'bot': settings['BOT_NAME']})

0 commit comments

Comments
 (0)