Skip to content

Commit 9adb5c3

Browse files
committed
Merge pull request scrapy#1433 from scrapy/codecov
Coverage and reports at codecov.io and coveralls.io
2 parents 12bebb6 + 27077d2 commit 9adb5c3

9 files changed

+57
-5
lines changed

.coveragerc

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,19 @@
11
[run]
2+
branch = true
23
include = scrapy/*
3-
omit = scrapy/xlib*,scrapy/tests*
4+
omit =
5+
tests/*
6+
scrapy/xlib/*
7+
scrapy/conf.py
8+
scrapy/stats.py
9+
scrapy/project.py
10+
scrapy/utils/decorator.py
11+
scrapy/statscol.py
12+
scrapy/squeue.py
13+
scrapy/log.py
14+
scrapy/dupefilter.py
15+
scrapy/command.py
16+
scrapy/linkextractor.py
17+
scrapy/spider.py
18+
scrapy/contrib/*
19+
scrapy/contrib_exp/*

.travis.yml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,11 @@ env:
77
- TOXENV=py33
88
- TOXENV=docs
99
install:
10-
- pip install -U tox twine wheel
10+
- pip install -U tox twine wheel codecov coveralls
1111
script: tox
12+
after_success:
13+
- codecov
14+
- coveralls
1215
notifications:
1316
irc:
1417
use_notice: true

README.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,10 @@ Scrapy
1818
:target: https://github.com/scrapy/scrapy/wiki/Python-3-Porting
1919
:alt: Python 3 Porting Status
2020

21+
.. image:: https://img.shields.io/codecov/c/github/scrapy/scrapy/master.svg
22+
:target: http://codecov.io/github/scrapy/scrapy?branch=master
23+
:alt: Coverage report
24+
2125

2226
Overview
2327
========

docs/contributing.rst

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,13 @@ To run a specific test (say ``tests/test_loader.py``) use:
154154

155155
``tox -- tests/test_loader.py``
156156

157+
To see the coverage report, install `coverage`_ (``pip install coverage``) and run:
158+
159+
``coverage report``
160+
161+
See the output of ``coverage --help`` for more options, such as HTML or XML reports.
162+
163+
.. _coverage: https://pypi.python.org/pypi/coverage
157164

158165
Writing tests
159166
-------------

tests/__init__.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,14 @@
1313
os.environ['https_proxy'] = ''
1414
os.environ['ftp_proxy'] = ''
1515

16+
# Absolutize paths to coverage config and output file because tests that
17+
# spawn subprocesses also change the current working directory.
18+
_sourceroot = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
19+
if 'COV_CORE_CONFIG' in os.environ:
20+
os.environ['COVERAGE_FILE'] = os.path.join(_sourceroot, '.coverage')
21+
os.environ['COV_CORE_CONFIG'] = os.path.join(_sourceroot,
22+
os.environ['COV_CORE_CONFIG'])
23+
1624
try:
1725
import unittest.mock as mock
1826
except ImportError:

tests/requirements-py3.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
pytest>=2.6.0
22
pytest-twisted
3+
pytest-cov
34
testfixtures
45
jmespath

tests/requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,6 @@ mock
33
mitmproxy==0.10.1
44
netlib==0.10.1
55
pytest-twisted
6+
pytest-cov
67
jmespath
78
testfixtures

tests/test_command_version.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,4 +14,16 @@ class VersionTest(ProcessTest, unittest.TestCase):
1414
def test_output(self):
1515
encoding = getattr(sys.stdout, 'encoding') or 'utf-8'
1616
_, out, _ = yield self.execute([])
17-
self.assertEqual(out.strip().decode(encoding), "Scrapy %s" % scrapy.__version__)
17+
self.assertEqual(
18+
out.strip().decode(encoding),
19+
"Scrapy %s" % scrapy.__version__,
20+
)
21+
22+
@defer.inlineCallbacks
23+
def test_verbose_output(self):
24+
encoding = getattr(sys.stdout, 'encoding') or 'utf-8'
25+
_, out, _ = yield self.execute(['-v'])
26+
headers = [l.partition(":")[0].strip()
27+
for l in out.strip().decode(encoding).splitlines()]
28+
self.assertEqual(headers, ['Scrapy', 'lxml', 'libxml2', 'Twisted',
29+
'Python', 'pyOpenSSL', 'Platform'])

tox.ini

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ deps =
1515
leveldb
1616
-rtests/requirements.txt
1717
commands =
18-
py.test {posargs:scrapy tests}
18+
py.test --cov=scrapy --cov-report= {posargs:scrapy tests}
1919

2020
[testenv:precise]
2121
basepython = python2.7
@@ -34,7 +34,7 @@ basepython = python2.7
3434
commands =
3535
pip install -U https://github.com/scrapy/w3lib/archive/master.zip#egg=w3lib
3636
pip install -U https://github.com/scrapy/queuelib/archive/master.zip#egg=queuelib
37-
py.test {posargs:scrapy tests}
37+
py.test --cov=scrapy --cov-report= {posargs:scrapy tests}
3838

3939
[testenv:py33]
4040
basepython = python3.3

0 commit comments

Comments
 (0)