forked from scrapy/scrapy
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path test_downloadermiddleware_stats.py
71 lines (58 loc) · 2.6 KB
/
test_downloadermiddleware_stats.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import warnings
from itertools import product
from unittest import TestCase
from scrapy.downloadermiddlewares.stats import DownloaderStats
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.http import Request, Response
from scrapy.spiders import Spider
from scrapy.utils.response import response_httprepr
from scrapy.utils.test import get_crawler
class MyException(Exception):
    """Sentinel exception raised in tests to exercise exception stats."""
class TestDownloaderStats(TestCase):
    """Verify that the DownloaderStats middleware records request,
    response and exception counters in the crawler's stats collector."""

    def setUp(self):
        # Build a fresh crawler/spider/middleware for every test and open
        # the spider on the stats collector so values are recorded for it.
        self.crawler = get_crawler(Spider)
        self.spider = self.crawler._create_spider("scrapytest.org")
        self.mw = DownloaderStats(self.crawler.stats)
        self.crawler.stats.open_spider(self.spider)
        self.req = Request("http://scrapytest.org")
        self.res = Response("scrapytest.org", status=400)

    def tearDown(self):
        self.crawler.stats.close_spider(self.spider, "")

    def assertStatsEqual(self, key, value):
        """Assert a single stat equals *value*, dumping all stats on failure."""
        actual = self.crawler.stats.get_value(key, spider=self.spider)
        dump = str(self.crawler.stats.get_stats(self.spider))
        self.assertEqual(actual, value, dump)

    def test_process_request(self):
        self.mw.process_request(self.req, self.spider)
        self.assertStatsEqual("downloader/request_count", 1)

    def test_process_response(self):
        self.mw.process_response(self.req, self.res, self.spider)
        self.assertStatsEqual("downloader/response_count", 1)

    def test_response_len(self):
        """downloader/response_bytes must match len(response_httprepr(r))
        for every combination of body and headers."""
        bodies = (b"", b"not_empty")  # empty and non-empty body
        header_variants = (
            {},
            {"lang": "en"},
            {"lang": "en", "User-Agent": "scrapy"},
        )  # zero, one and two headers
        for body, headers in product(bodies, header_variants):
            resp = Response(
                url="scrapytest.org", status=200, body=body, headers=headers
            )
            # Reset the counter so each combination is checked in isolation.
            self.crawler.stats.set_value("downloader/response_bytes", 0)
            self.mw.process_response(self.req, resp, self.spider)
            with warnings.catch_warnings():
                # response_httprepr is deprecated; silence just that warning
                # while computing the expected byte count.
                warnings.simplefilter("ignore", ScrapyDeprecationWarning)
                expected = len(response_httprepr(resp))
            self.assertStatsEqual("downloader/response_bytes", expected)

    def test_process_exception(self):
        self.mw.process_exception(self.req, MyException(), self.spider)
        self.assertStatsEqual("downloader/exception_count", 1)
        self.assertStatsEqual(
            "downloader/exception_type_count/tests.test_downloadermiddleware_stats.MyException",
            1,
        )