
Commit d5087b0

Merge branch '0.16' of github.com:scrapy/scrapy into 0.16

2 parents: 7184094 + 1787011
File tree

6 files changed: +112 additions, -61 deletions

docs/news.rst (+11)

@@ -3,6 +3,17 @@
 Release notes
 =============
 
+0.16.2 (released 2012-11-09)
+----------------------------
+
+- scrapy contracts: python2.6 compat (:commit:`a4a9199`)
+- scrapy contracts verbose option (:commit:`ec41673`)
+- proper unittest-like output for scrapy contracts (:commit:`86635e4`)
+- added open_in_browser to debugging doc (:commit:`c9b690d`)
+- removed reference to global scrapy stats from settings doc (:commit:`dd55067`)
+- Fix SpiderState bug in Windows platforms (:commit:`58998f4`)
+
+
 0.16.1 (released 2012-10-26)
 ----------------------------
 

scrapy/__init__.py (+2, -2)

@@ -2,8 +2,8 @@
 Scrapy - a screen scraping framework written in Python
 """
 
-version_info = (0, 16, 1)
-__version__ = "0.16.1"
+version_info = (0, 16, 2)
+__version__ = "0.16.2"
 
 import sys, os, warnings
 
scrapy/commands/check.py (+9, -1)

@@ -1,11 +1,14 @@
 from collections import defaultdict
 from functools import wraps
+from unittest import TextTestRunner
 
+from scrapy import signals
 from scrapy.command import ScrapyCommand
 from scrapy.contracts import ContractsManager
 from scrapy.utils.misc import load_object
 from scrapy.utils.spider import iterate_spider_output
 from scrapy.utils.conf import build_component_list
+from scrapy.xlib.pydispatch import dispatcher
 
 
 def _generate(cb):
@@ -31,6 +34,8 @@ def add_options(self, parser):
         ScrapyCommand.add_options(self, parser)
         parser.add_option("-l", "--list", dest="list", action="store_true",
             help="only list contracts, without checking them")
+        parser.add_option("-v", "--verbose", dest="verbose", default=1, action="count",
+            help="print all contract hooks")
 
     def run(self, args, opts):
         # load contracts
@@ -39,6 +44,7 @@ def run(self, args, opts):
             self.settings['SPIDER_CONTRACTS'],
         )
         self.conman = ContractsManager([load_object(c) for c in contracts])
+        self.results = TextTestRunner(verbosity=opts.verbose)._makeResult()
 
         # contract requests
         contract_reqs = defaultdict(list)
@@ -61,6 +67,8 @@ def run(self, args, opts):
                 for method in sorted(methods):
                     print ' * %s' % method
         else:
+            dispatcher.connect(self.results.printErrors,
+                signals.engine_stopped)
             self.crawler.start()
 
     def get_requests(self, spider):
@@ -69,7 +77,7 @@ def get_requests(self, spider):
         for key, value in vars(type(spider)).items():
            if callable(value) and value.__doc__:
                 bound_method = value.__get__(spider, type(spider))
-                request = self.conman.from_method(bound_method)
+                request = self.conman.from_method(bound_method, self.results)
 
                 if request:
                     request.callback = _generate(request.callback)
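
For context, the hooks installed above run against contract annotations in a spider callback's docstring. Below is a hedged sketch of such a callback using the three default contracts exercised by this commit's tests (UrlContract, ReturnsContract, ScrapesContract); the spider name, URL and item fields are invented for illustration, and the -v flag added above simply prints every pre/post hook in unittest style.

# Hypothetical spider used only to illustrate what `scrapy check <spider>`
# (optionally with the new -v flag) would exercise; the spider name, URL and
# item fields are made up for this sketch.
from scrapy.spider import BaseSpider
from scrapy.item import Item, Field


class ProductItem(Item):
    name = Field()
    price = Field()


class ExampleSpider(BaseSpider):
    name = 'example'

    def parse_product(self, response):
        """ Contracts live in the callback docstring; each @directive below
        maps to one of the default contract classes (url, returns, scrapes).

        @url http://www.example.com/some-product
        @returns items 1 1
        @scrapes name price
        """
        item = ProductItem()
        item['name'] = 'sample'
        item['price'] = '10'
        return item

Running `scrapy check example` would then issue the request declared by @url and report each contract hook through the TextTestResult created in run() above.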

scrapy/contracts/__init__.py (+57, -38)

@@ -1,10 +1,11 @@
+import sys
 import re
 from functools import wraps
+from unittest import TestCase
 
 from scrapy.http import Request
 from scrapy.utils.spider import iterate_spider_output
 from scrapy.utils.python import get_spec
-from scrapy.exceptions import ContractFail
 
 
 class ContractsManager(object):
@@ -27,7 +28,7 @@ def extract_contracts(self, method):
 
         return contracts
 
-    def from_method(self, method, fail=False):
+    def from_method(self, method, results):
         contracts = self.extract_contracts(method)
         if contracts:
             # calculate request args
@@ -43,9 +44,9 @@ def from_method(self, method, fail=False):
 
                 # execute pre and post hooks in order
                 for contract in reversed(contracts):
-                    request = contract.add_pre_hook(request, fail)
+                    request = contract.add_pre_hook(request, results)
                 for contract in contracts:
-                    request = contract.add_post_hook(request, fail)
+                    request = contract.add_post_hook(request, results)
 
                 return request
 
@@ -54,49 +55,67 @@ class Contract(object):
     """ Abstract class for contracts """
 
     def __init__(self, method, *args):
-        self.method = method
+        self.testcase_pre = self.create_testcase(method, 'pre-hook')
+        self.testcase_post = self.create_testcase(method, 'post-hook')
         self.args = args
 
-    def add_pre_hook(self, request, fail=False):
-        cb = request.callback
-
-        @wraps(cb)
-        def wrapper(response):
-            try:
-                self.pre_process(response)
-            except ContractFail as e:
-                if fail:
-                    raise
+    def create_testcase(self, method, hook):
+        spider = method.__self__.name
+
+        class ContractTestCase(TestCase):
+            def __str__(_self):
+                return "[%s] %s (@%s %s)" % (spider, method.__name__, self.name, hook)
+
+        name = '%s_%s' % (spider, method.__name__)
+        setattr(ContractTestCase, name, lambda x: x)
+        return ContractTestCase(name)
+
+    def add_pre_hook(self, request, results):
+        if hasattr(self, 'pre_process'):
+            cb = request.callback
+
+            @wraps(cb)
+            def wrapper(response):
+                try:
+                    results.startTest(self.testcase_pre)
+                    self.pre_process(response)
+                    results.stopTest(self.testcase_pre)
+                except AssertionError:
+                    results.addFailure(self.testcase_pre, sys.exc_info())
+                except Exception:
+                    results.addError(self.testcase_pre, sys.exc_info())
                 else:
-                    print e.format(self.method)
-            return list(iterate_spider_output(cb(response)))
+                    results.addSuccess(self.testcase_pre)
+                finally:
+                    return list(iterate_spider_output(cb(response)))
+
+            request.callback = wrapper
 
-        request.callback = wrapper
         return request
 
-    def add_post_hook(self, request, fail=False):
-        cb = request.callback
-
-        @wraps(cb)
-        def wrapper(response):
-            output = list(iterate_spider_output(cb(response)))
-            try:
-                self.post_process(output)
-            except ContractFail as e:
-                if fail:
-                    raise
+    def add_post_hook(self, request, results):
+        if hasattr(self, 'post_process'):
+            cb = request.callback
+
+            @wraps(cb)
+            def wrapper(response):
+                try:
+                    output = list(iterate_spider_output(cb(response)))
+                    results.startTest(self.testcase_post)
+                    self.post_process(output)
+                    results.stopTest(self.testcase_post)
+                except AssertionError:
+                    results.addFailure(self.testcase_post, sys.exc_info())
+                except Exception:
+                    results.addError(self.testcase_post, sys.exc_info())
                 else:
-                    print e.format(self.method)
-            return output
+                    results.addSuccess(self.testcase_post)
+                finally:
+                    return output
+
+            request.callback = wrapper
 
-        request.callback = wrapper
         return request
 
     def adjust_request_args(self, args):
         return args
-
-    def pre_process(self, response):
-        pass
-
-    def post_process(self, output):
-        pass
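
The rewritten hooks above stop printing ContractFail messages and instead feed a standard unittest result object, which is what produces the "proper unittest-like output" mentioned in the release notes. Here is a minimal, self-contained sketch of that reporting pattern using only the stdlib; DemoCase and failing_check are invented names, while TextTestRunner(...)._makeResult(), startTest/stopTest, addSuccess/addFailure/addError and printErrors() are the same calls the new code relies on.

# Minimal sketch (stdlib only) of the unittest reporting pattern the new
# pre/post hooks use; DemoCase and failing_check are placeholders.
import sys
from unittest import TestCase, TextTestRunner


class DemoCase(TestCase):
    def demo_contract(self):  # never run directly, only used as a label
        pass


# Build a result object the same way check.py does; verbosity controls
# whether each "test" (i.e. each contract hook) is printed as it runs.
results = TextTestRunner(verbosity=2)._makeResult()
case = DemoCase('demo_contract')


def failing_check():
    # Stands in for a contract's pre_process/post_process; an AssertionError
    # (which ContractFail now subclasses) is recorded as a failure.
    assert False, "expected 1 item, got 0"


results.startTest(case)
try:
    failing_check()
except AssertionError:
    results.addFailure(case, sys.exc_info())   # contract failed
except Exception:
    results.addError(case, sys.exc_info())     # unexpected error
else:
    results.addSuccess(case)
finally:
    results.stopTest(case)

# check.py connects this to signals.engine_stopped so the tracebacks are
# printed once the crawl finishes.
results.printErrors()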

scrapy/exceptions.py (+1, -4)

@@ -52,7 +52,4 @@ class ScrapyDeprecationWarning(Warning):
 
 class ContractFail(AssertionError):
     """Error raised in case of a failing contract"""
-
-    def format(self, method):
-        return '[FAILED] %s:%s\n>>> %s\n' % \
-            (method.im_class.name, method.__name__, self)
+    pass
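
Since ContractFail is now just an AssertionError subclass, anything a contract raises with it is caught by the except AssertionError branch in the hooks above and recorded via addFailure() instead of being printed. As a hedged sketch of how that is used from user code, here is a hypothetical custom contract; the class, its min_items name and the counting logic are invented, but the Contract base class, self.args and the post_process hook come from scrapy/contracts/__init__.py above.

# Hypothetical custom contract; only ContractFail, Contract, self.args and
# post_process are taken from this commit, the rest is illustrative.
from scrapy.contracts import Contract
from scrapy.exceptions import ContractFail


class MinItemsContract(Contract):
    """ Fail unless the callback returned at least N objects.

    Used in a spider callback docstring as:

        @min_items 2
    """
    name = 'min_items'

    def post_process(self, output):
        expected = int(self.args[0])
        if len(output) < expected:
            # Reported as a unittest failure by add_post_hook's
            # except AssertionError branch.
            raise ContractFail("expected at least %d objects, got %d"
                               % (expected, len(output)))

Such a contract would be enabled through the SPIDER_CONTRACTS setting, which run() above merges with SPIDER_CONTRACTS_BASE before building the ContractsManager.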

scrapy/tests/test_contracts.py (+32, -16)

@@ -1,9 +1,10 @@
+from unittest import TextTestRunner
+
 from twisted.trial import unittest
 
 from scrapy.spider import BaseSpider
 from scrapy.http import Request
 from scrapy.item import Item, Field
-from scrapy.exceptions import ContractFail
 from scrapy.contracts import ContractsManager
 from scrapy.contracts.default import (
     UrlContract,
@@ -71,54 +72,69 @@ def parse_no_url(self, response):
 class ContractsManagerTest(unittest.TestCase):
     contracts = [UrlContract, ReturnsContract, ScrapesContract]
 
+    def setUp(self):
+        self.conman = ContractsManager(self.contracts)
+        self.results = TextTestRunner()._makeResult()
+        self.results.stream = None
+
+    def should_succeed(self):
+        self.assertFalse(self.results.failures)
+        self.assertFalse(self.results.errors)
+
+    def should_fail(self):
+        self.assertTrue(self.results.failures)
+        self.assertFalse(self.results.errors)
+
     def test_contracts(self):
-        conman = ContractsManager(self.contracts)
+        spider = TestSpider()
 
         # extract contracts correctly
-        contracts = conman.extract_contracts(TestSpider.returns_request)
+        contracts = self.conman.extract_contracts(spider.returns_request)
         self.assertEqual(len(contracts), 2)
         self.assertEqual(frozenset(map(type, contracts)),
             frozenset([UrlContract, ReturnsContract]))
 
         # returns request for valid method
-        request = conman.from_method(TestSpider.returns_request)
+        request = self.conman.from_method(spider.returns_request, self.results)
         self.assertNotEqual(request, None)
 
         # no request for missing url
-        request = conman.from_method(TestSpider.parse_no_url)
+        request = self.conman.from_method(spider.parse_no_url, self.results)
         self.assertEqual(request, None)
 
     def test_returns(self):
-        conman = ContractsManager(self.contracts)
-
         spider = TestSpider()
         response = ResponseMock()
 
         # returns_item
-        request = conman.from_method(spider.returns_item, fail=True)
+        request = self.conman.from_method(spider.returns_item, self.results)
         output = request.callback(response)
         self.assertEqual(map(type, output), [TestItem])
+        self.should_succeed()
 
         # returns_request
-        request = conman.from_method(spider.returns_request, fail=True)
+        request = self.conman.from_method(spider.returns_request, self.results)
         output = request.callback(response)
         self.assertEqual(map(type, output), [Request])
+        self.should_succeed()
 
         # returns_fail
-        request = conman.from_method(spider.returns_fail, fail=True)
-        self.assertRaises(ContractFail, request.callback, response)
+        request = self.conman.from_method(spider.returns_fail, self.results)
+        request.callback(response)
+        self.should_fail()
 
     def test_scrapes(self):
-        conman = ContractsManager(self.contracts)
-
         spider = TestSpider()
         response = ResponseMock()
 
         # scrapes_item_ok
-        request = conman.from_method(spider.scrapes_item_ok, fail=True)
+        request = self.conman.from_method(spider.scrapes_item_ok, self.results)
         output = request.callback(response)
         self.assertEqual(map(type, output), [TestItem])
+        self.should_succeed()
 
         # scrapes_item_fail
-        request = conman.from_method(spider.scrapes_item_fail, fail=True)
-        self.assertRaises(ContractFail, request.callback, response)
+        request = self.conman.from_method(spider.scrapes_item_fail,
+                                          self.results)
+        request.callback(response)
+        self.should_fail()
