# libs/openid/test/test_fetchers.py
import warnings
import unittest
import sys
import urllib2
import socket

from openid import fetchers

# XXX: make these separate test cases
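
# Assert that an HTTPResponse matches the expected one, ignoring the
# server-generated 'date' and 'server' headers.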
def failUnlessResponseExpected(expected, actual):
    assert expected.final_url == actual.final_url, (
        "%r != %r" % (expected.final_url, actual.final_url))
    assert expected.status == actual.status
    assert expected.body == actual.body
    got_headers = dict(actual.headers)
    del got_headers['date']
    del got_headers['server']
    for k, v in expected.headers.iteritems():
        assert got_headers[k] == v, (k, v, got_headers[k])
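
# Run one fetcher against the local test server.  `exc` says whether the
# fetcher is expected to raise its own exceptions (True) or to wrap them
# in fetchers.HTTPFetchingError (False).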
def test_fetcher(fetcher, exc, server):
    def geturl(path):
        return 'http://%s:%s%s' % (socket.getfqdn(server.server_name),
                                   server.socket.getsockname()[1],
                                   path)

    expected_headers = {'content-type':'text/plain'}

    def plain(path, code):
        path = '/' + path
        expected = fetchers.HTTPResponse(
            geturl(path), code, expected_headers, path)
        return (path, expected)

    expect_success = fetchers.HTTPResponse(
        geturl('/success'), 200, expected_headers, '/success')
    cases = [
        ('/success', expect_success),
        ('/301redirect', expect_success),
        ('/302redirect', expect_success),
        ('/303redirect', expect_success),
        ('/307redirect', expect_success),
        plain('notfound', 404),
        plain('badreq', 400),
        plain('forbidden', 403),
        plain('error', 500),
        plain('server_error', 503),
        ]

    for path, expected in cases:
        fetch_url = geturl(path)
        try:
            actual = fetcher.fetch(fetch_url)
        except (SystemExit, KeyboardInterrupt):
            pass
        except:
            print fetcher, fetch_url
            raise
        else:
            failUnlessResponseExpected(expected, actual)

    for err_url in [geturl('/closed'),
                    'http://invalid.janrain.com/',
                    'not:a/url',
                    'ftp://janrain.com/pub/']:
        try:
            result = fetcher.fetch(err_url)
        except (KeyboardInterrupt, SystemExit):
            raise
        except fetchers.HTTPError, why:
            # This is raised by the Curl fetcher for bad cases
            # detected by the fetchers module, but it's a subclass of
            # HTTPFetchingError, so we have to catch it explicitly.
            assert exc
        except fetchers.HTTPFetchingError, why:
            assert not exc, (fetcher, exc, server)
        except:
            assert exc
        else:
            assert False, 'An exception was expected for %r (%r)' % (fetcher, result)
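
# Instantiate every fetcher whose underlying library is available
# (urllib2, pycurl, httplib2), then run the tests once with the raw
# fetchers and once with their exception-wrapping counterparts.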
def run_fetcher_tests(server):
    exc_fetchers = []
    for klass, library_name in [
        (fetchers.Urllib2Fetcher, 'urllib2'),
        (fetchers.CurlHTTPFetcher, 'pycurl'),
        (fetchers.HTTPLib2Fetcher, 'httplib2'),
        ]:
        try:
            exc_fetchers.append(klass())
        except RuntimeError, why:
            if why[0].startswith('Cannot find %s library' % (library_name,)):
                try:
                    __import__(library_name)
                except ImportError:
                    warnings.warn(
                        'Skipping tests for %r fetcher because '
                        'the library did not import.' % (library_name,))
                    pass
                else:
                    assert False, ('%s present but not detected' % (library_name,))
            else:
                raise

    non_exc_fetchers = []
    for f in exc_fetchers:
        non_exc_fetchers.append(fetchers.ExceptionWrappingFetcher(f))

    for f in exc_fetchers:
        test_fetcher(f, True, server)

    for f in non_exc_fetchers:
        test_fetcher(f, False, server)

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
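
# Request handler for the in-process test server: each path in `cases`
# maps to a status code and an optional redirect target.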
class FetcherTestHandler(BaseHTTPRequestHandler):
    cases = {
        '/success':(200, None),
        '/301redirect':(301, '/success'),
        '/302redirect':(302, '/success'),
        '/303redirect':(303, '/success'),
        '/307redirect':(307, '/success'),
        '/notfound':(404, None),
        '/badreq':(400, None),
        '/forbidden':(403, None),
        '/error':(500, None),
        '/server_error':(503, None),
        }

    def log_request(self, *args):
        pass

    def do_GET(self):
        if self.path == '/closed':
            self.wfile.close()
        else:
            try:
                http_code, location = self.cases[self.path]
            except KeyError:
                self.errorResponse('Bad path')
            else:
                extra_headers = [('Content-type', 'text/plain')]
                if location is not None:
                    host, port = self.server.server_address
                    base = ('http://%s:%s' % (socket.getfqdn(host), port,))
                    location = base + location
                    extra_headers.append(('Location', location))
                self._respond(http_code, extra_headers, self.path)
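
    # POST mirrors GET except that redirect paths are treated as errors
    # and the request body is echoed back with its original content type.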
    def do_POST(self):
        try:
            http_code, location = self.cases[self.path]
        except KeyError:
            self.errorResponse('Bad path')
        else:
            if http_code in [301, 302, 303, 307]:
                self.errorResponse()
            else:
                content_type = self.headers.get('content-type', 'text/plain')
                extra_headers = [('Content-type', content_type)]
                content_length = int(self.headers.get('Content-length', '-1'))
                body = self.rfile.read(content_length)
                self._respond(http_code, extra_headers, body)
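
    # Send a 400 response describing the offending request.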
    def errorResponse(self, message=None):
        req = [
            ('HTTP method', self.command),
            ('path', self.path),
            ]
        if message:
            req.append(('message', message))
        body_parts = ['Bad request:\r\n']
        for k, v in req:
            body_parts.append(' %s: %s\r\n' % (k, v))
        body = ''.join(body_parts)
        self._respond(400, [('Content-type', 'text/plain')], body)
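
    # Write the status line, the given extra headers, and the body.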
    def _respond(self, http_code, extra_headers, body):
        self.send_response(http_code)
        for k, v in extra_headers:
            self.send_header(k, v)
        self.end_headers()
        self.wfile.write(body)
        self.wfile.close()
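
    # The '/closed' case closes wfile early, so guard against flushing a
    # file that is already closed.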
    def finish(self):
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()
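
# Start the test server on a quasi-random port in a daemon thread and run
# the fetcher tests against it.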
def test():
    import socket
    host = socket.getfqdn('127.0.0.1')
    # When I use port 0 here, it works for the first fetch and the
    # next one gets connection refused. Bummer. So instead, pick a
    # port that's *probably* not in use.
    import os
    port = (os.getpid() % 31000) + 1024

    server = HTTPServer((host, port), FetcherTestHandler)

    import threading
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.setDaemon(True)
    server_thread.start()

    run_fetcher_tests(server)
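
# Stub fetcher that returns a sentinel instead of doing any I/O; used to
# check that fetchers.fetch() delegates to the default fetcher.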
class FakeFetcher(object):
    sentinel = object()

    def fetch(self, *args, **kwargs):
        return self.sentinel
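
# Tests for getting and setting the module-level default fetcher.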
class DefaultFetcherTest(unittest.TestCase):
    def setUp(self):
        """reset the default fetcher to None"""
        fetchers.setDefaultFetcher(None)

    def tearDown(self):
        """reset the default fetcher to None"""
        fetchers.setDefaultFetcher(None)

    def test_getDefaultNotNone(self):
        """Make sure that None is never returned as a default fetcher"""
        self.failUnless(fetchers.getDefaultFetcher() is not None)
        fetchers.setDefaultFetcher(None)
        self.failUnless(fetchers.getDefaultFetcher() is not None)

    def test_setDefault(self):
        """Make sure that getDefaultFetcher returns the object set by
        setDefaultFetcher"""
        sentinel = object()
        fetchers.setDefaultFetcher(sentinel, wrap_exceptions=False)
        self.failUnless(fetchers.getDefaultFetcher() is sentinel)

    def test_callFetch(self):
        """Make sure that fetchers.fetch() uses the default fetcher
        instance that was set."""
        fetchers.setDefaultFetcher(FakeFetcher())
        actual = fetchers.fetch('bad://url')
        self.failUnless(actual is FakeFetcher.sentinel)

    def test_wrappedByDefault(self):
        """Make sure that the default fetcher instance wraps
        exceptions by default"""
        default_fetcher = fetchers.getDefaultFetcher()
        self.failUnless(isinstance(default_fetcher,
                                   fetchers.ExceptionWrappingFetcher),
                        default_fetcher)

        self.failUnlessRaises(fetchers.HTTPFetchingError,
                              fetchers.fetch, 'http://invalid.janrain.com/')

    def test_notWrapped(self):
        """Make sure that if we set a non-wrapped fetcher as default,
        it will not wrap exceptions."""
        # A fetcher that will raise an exception when it encounters a
        # host that will not resolve
        fetcher = fetchers.Urllib2Fetcher()
        fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)

        self.failIf(isinstance(fetchers.getDefaultFetcher(),
                               fetchers.ExceptionWrappingFetcher))

        try:
            fetchers.fetch('http://invalid.janrain.com/')
        except fetchers.HTTPFetchingError:
            self.fail('Should not be wrapping exception')
        except:
            exc = sys.exc_info()[1]
            self.failUnless(isinstance(exc, urllib2.URLError), exc)
        else:
            self.fail('Should have raised an exception')
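
# Build the suite: the server-based fetcher tests plus DefaultFetcherTest.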
def pyUnitTests():
    case1 = unittest.FunctionTestCase(test)
    loadTests = unittest.defaultTestLoader.loadTestsFromTestCase
    case2 = loadTests(DefaultFetcherTest)
    return unittest.TestSuite([case1, case2])