@@ -56,14 +56,29 @@ def filter(self, record):
56
56
}
57
57
58
58
59
- def configure_logging (settings = None ):
60
- """Initialize and configure default loggers
59
+ def configure_logging (settings = None , install_root_handler = True ):
60
+ """
61
+ Initialize logging defaults for Scrapy.
62
+
63
+ :param settings: settings used to create and configure a handler for the
64
+ root logger (default: None).
65
+ :type settings: dict, :class:`~scrapy.settings.Settings` object or ``None``
66
+
67
+ :param install_root_handler: whether to install root logging handler
68
+ (default: True)
69
+ :type install_root_handler: bool
61
70
62
71
This function does:
63
- - Route warnings and twisted logging through Python standard logging
64
- - Set FailureFormatter filter on Scrapy logger
65
- - Assign DEBUG and ERROR level to Scrapy and Twisted loggers respectively
66
- - Create a handler for the root logger according to given settings
72
+
73
+ - Route warnings and twisted logging through Python standard logging
74
+ - Assign DEBUG and ERROR level to Scrapy and Twisted loggers respectively
75
+ - Route stdout to log if LOG_STDOUT setting is True
76
+
77
+ When ``install_root_handler`` is True (default), this function also
78
+ creates a handler for the root logger according to given settings
79
+ (see :ref:`topics-logging-settings`). You can override default options
80
+ using ``settings`` argument. When ``settings`` is empty or None, defaults
81
+ are used.
67
82
"""
68
83
if not sys .warnoptions :
69
84
# Route warnings through python logging
@@ -74,35 +89,39 @@ def configure_logging(settings=None):
74
89
75
90
dictConfig (DEFAULT_LOGGING )
76
91
77
- if isinstance (settings , dict ):
92
+ if isinstance (settings , dict ) or settings is None :
78
93
settings = Settings (settings )
79
94
80
- if settings :
81
- logging . root . setLevel (logging .NOTSET )
95
+ if settings . getbool ( 'LOG_STDOUT' ) :
96
+ sys . stdout = StreamLogger (logging .getLogger ( 'stdout' ) )
82
97
83
- if settings .getbool ('LOG_STDOUT' ):
84
- sys .stdout = StreamLogger (logging .getLogger ('stdout' ))
85
-
86
- # Set up the default log handler
87
- filename = settings .get ('LOG_FILE' )
88
- if filename :
89
- encoding = settings .get ('LOG_ENCODING' )
90
- handler = logging .FileHandler (filename , encoding = encoding )
91
- elif settings .getbool ('LOG_ENABLED' ):
92
- handler = logging .StreamHandler ()
93
- else :
94
- handler = logging .NullHandler ()
95
-
96
- formatter = logging .Formatter (
97
- fmt = settings .get ('LOG_FORMAT' ),
98
- datefmt = settings .get ('LOG_DATEFORMAT' )
99
- )
100
- handler .setFormatter (formatter )
101
- handler .setLevel (settings .get ('LOG_LEVEL' ))
102
- handler .addFilter (TopLevelFormatter (['scrapy' ]))
98
+ if install_root_handler :
99
+ logging .root .setLevel (logging .NOTSET )
100
+ handler = _get_handler (settings )
103
101
logging .root .addHandler (handler )
104
102
105
103
104
def _get_handler(settings):
    """Build and return a log handler configured from *settings*.

    Handler selection, in priority order:

    - ``LOG_FILE`` set   -> :class:`logging.FileHandler` for that path,
      opened with ``LOG_ENCODING``;
    - ``LOG_ENABLED`` false -> :class:`logging.NullHandler` (logging off);
    - otherwise          -> :class:`logging.StreamHandler` (stderr).

    The handler gets a formatter built from ``LOG_FORMAT`` /
    ``LOG_DATEFORMAT``, its level from ``LOG_LEVEL``, and a
    ``TopLevelFormatter`` filter that collapses record names under the
    ``scrapy`` top-level logger.
    """
    log_file = settings.get('LOG_FILE')
    if log_file:
        handler = logging.FileHandler(
            log_file, encoding=settings.get('LOG_ENCODING'))
    elif not settings.getbool('LOG_ENABLED'):
        handler = logging.NullHandler()
    else:
        handler = logging.StreamHandler()

    handler.setFormatter(logging.Formatter(
        fmt=settings.get('LOG_FORMAT'),
        datefmt=settings.get('LOG_DATEFORMAT'),
    ))
    handler.setLevel(settings.get('LOG_LEVEL'))
    handler.addFilter(TopLevelFormatter(['scrapy']))
    return handler
123
+
124
+
106
125
def log_scrapy_info (settings ):
107
126
logger .info ("Scrapy %(version)s started (bot: %(bot)s)" ,
108
127
{'version' : scrapy .__version__ , 'bot' : settings ['BOT_NAME' ]})
0 commit comments