ppapi/native_client/tools/browser_tester/browsertester/server.py
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import BaseHTTPServer
import cgi
import mimetypes
import os
import os.path
import posixpath
import SimpleHTTPServer
import SocketServer
import threading
import time
import urllib
import urlparse

class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):

  def NormalizePath(self, path):
    path = path.split('?', 1)[0]
    path = path.split('#', 1)[0]
    path = posixpath.normpath(urllib.unquote(path))
    words = path.split('/')

    bad = set((os.curdir, os.pardir, ''))
    words = [word for word in words if word not in bad]
    # The path of the request should always use POSIX-style path separators, so
    # that the filename input of --map_file can be a POSIX-style path and still
    # match correctly in translate_path().
    return '/'.join(words)
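
  # Illustrative note (not part of the original file): NormalizePath drops the
  # query string, the fragment, and any '.', '..' or empty path components, so
  # a request such as '/a/./b/../c.html?t=1' normalizes to 'a/c.html'.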

  def translate_path(self, path):
    path = self.NormalizePath(path)
    if path in self.server.file_mapping:
      return self.server.file_mapping[path]
    for extra_dir in self.server.serving_dirs:
      # TODO(halyavin): set allowed paths in another parameter?
      full_path = os.path.join(extra_dir, os.path.basename(path))
      if os.path.isfile(full_path):
        return full_path

      # Try the complete relative path, not just a basename. This allows the
      # user to serve everything recursively under extra_dir, not just one
      # level deep.

      # One use case for this is the Native Client SDK examples. The examples
      # expect to be able to access files as relative paths from the root of
      # the example directory.
      # Sometimes two subdirectories contain files with the same name, so
      # including all subdirectories in self.server.serving_dirs will not do
      # the correct thing (i.e. the wrong file will be chosen, even though the
      # correct path was given).
      full_path = os.path.join(extra_dir, path)
      if os.path.isfile(full_path):
        return full_path
    if not path.endswith('favicon.ico') and not self.server.allow_404:
      self.server.listener.ServerError('Cannot find file \'%s\'' % path)
    return path

  def guess_type(self, path):
    # We store the extension -> MIME type mapping in the server instead of the
    # request handler so that we can add additional mapping entries via the
    # command line.
    base, ext = posixpath.splitext(path)
    if ext in self.server.extensions_mapping:
      return self.server.extensions_mapping[ext]
    ext = ext.lower()
    if ext in self.server.extensions_mapping:
      return self.server.extensions_mapping[ext]
    else:
      return self.server.extensions_mapping['']
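
  # Illustrative note (an assumption, not in the original file): a NaCl module
  # like 'hello.nexe' typically has no entry in Python's built-in mimetypes
  # table, so unless a '.nexe' mapping is supplied on the command line it is
  # served with the '' fallback, 'application/octet-stream'.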

  def SendRPCResponse(self, response):
    self.send_response(200)
    self.send_header("Content-type", "text/plain")
    self.send_header("Content-length", str(len(response)))
    self.end_headers()
    self.wfile.write(response)

    # shut down the connection
    self.wfile.flush()
    self.connection.shutdown(1)

  def HandleRPC(self, name, query):
    kargs = {}
    for k, v in query.iteritems():
      assert len(v) == 1, k
      kargs[k] = v[0]

    l = self.server.listener
    try:
      response = getattr(l, name)(**kargs)
    except Exception, e:
      self.SendRPCResponse('%r' % (e,))
      raise
    else:
      self.SendRPCResponse(response)
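
  # Illustrative note (not part of the original file): do_GET below routes a
  # request like '/TESTER/Log?message=hello' here with name='Log' and
  # query={'message': ['hello']}, which becomes the call
  # self.server.listener.Log(message='hello').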

  # For Last-Modified-based caching, the timestamp needs to be old enough
  # for the browser cache to be used (at least 60 seconds).
  # http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html
  # Often we clobber and regenerate files for testing, so this is needed
  # to actually use the browser cache.
  def send_header(self, keyword, value):
    if keyword == 'Last-Modified':
      last_mod_format = '%a, %d %b %Y %H:%M:%S GMT'
      old_value_as_t = time.strptime(value, last_mod_format)
      old_value_in_secs = time.mktime(old_value_as_t)
      new_value_in_secs = old_value_in_secs - 360
      value = time.strftime(last_mod_format,
                            time.localtime(new_value_in_secs))
    SimpleHTTPServer.SimpleHTTPRequestHandler.send_header(self,
                                                          keyword,
                                                          value)
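
  # Illustrative note (not part of the original file): the override above
  # back-dates Last-Modified by 360 seconds, so a file stamped 12:06:00 is
  # reported as 12:00:00, well past the roughly 60-second freshness threshold
  # mentioned in the comment above.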

  def do_POST(self):
    # Backwards compatible - treat result as tuple without named fields.
    _, _, path, _, query, _ = urlparse.urlparse(self.path)

    self.server.listener.Log('POST %s (%s)' % (self.path, path))
    if path == '/echo':
      self.send_response(200)
      self.end_headers()
      data = self.rfile.read(int(self.headers.getheader('content-length')))
      self.wfile.write(data)
    elif self.server.output_dir is not None:
      # Try to write the file to disk.
      path = self.NormalizePath(path)
      output_path = os.path.join(self.server.output_dir, path)
      try:
        outfile = open(output_path, 'w')
      except IOError:
        error_message = 'File not found: %r' % output_path
        self.server.listener.ServerError(error_message)
        self.send_error(404, error_message)
        return

      try:
        data = self.rfile.read(int(self.headers.getheader('content-length')))
        outfile.write(data)
      except IOError, e:
        outfile.close()
        try:
          os.remove(output_path)
        except OSError:
          # Oh, well.
          pass
        error_message = 'Can\'t write file: %r\n' % output_path
        error_message += 'Exception:\n%s' % str(e)
        self.server.listener.ServerError(error_message)
        self.send_error(500, error_message)
        return

      outfile.close()

      # Send a success response.
      self.send_response(200)
      self.end_headers()
    else:
      error_message = 'File not found: %r' % path
      self.server.listener.ServerError(error_message)
      self.send_error(404, error_message)

    self.server.ResetTimeout()

  def do_GET(self):
    # Backwards compatible - treat result as tuple without named fields.
    _, _, path, _, query, _ = urlparse.urlparse(self.path)

    tester = '/TESTER/'
    if path.startswith(tester):
      # If the path starts with '/TESTER/', the GET is an RPC call.
      name = path[len(tester):]
      # Supporting Python 2.5 prevents us from using urlparse.parse_qs
      query = cgi.parse_qs(query, True)

      self.server.rpc_lock.acquire()
      try:
        self.HandleRPC(name, query)
      finally:
        self.server.rpc_lock.release()

      # Don't reset the timeout. This is not "part of the test", rather it's
      # used to tell us if the renderer process is still alive.
      if name == 'JavaScriptIsAlive':
        self.server.JavaScriptIsAlive()
        return

    elif path in self.server.redirect_mapping:
      dest = self.server.redirect_mapping[path]
      self.send_response(301, 'Moved')
      self.send_header('Location', dest)
      self.end_headers()
      self.wfile.write(self.error_message_format %
                       {'code': 301,
                        'message': 'Moved',
                        'explain': 'Object moved permanently'})
      self.server.listener.Log('REDIRECT %s (%s -> %s)' %
                               (self.path, path, dest))
    else:
      self.server.listener.Log('GET %s (%s)' % (self.path, path))
      # A normal GET request for transferring files, etc.
      f = self.send_head()
      if f:
        self.copyfile(f, self.wfile)
        f.close()

    self.server.ResetTimeout()

  def copyfile(self, source, outputfile):
    # Bandwidth values <= 0.0 are considered infinite
    if self.server.bandwidth <= 0.0:
      return SimpleHTTPServer.SimpleHTTPRequestHandler.copyfile(
          self, source, outputfile)

    self.server.listener.Log('Simulating %f mbps server BW' %
                             self.server.bandwidth)
    chunk_size = 1500 # What size to use?
    bits_per_sec = self.server.bandwidth * 1000000
    start_time = time.time()
    data_sent = 0
    while True:
      chunk = source.read(chunk_size)
      if len(chunk) == 0:
        break
      cur_elapsed = time.time() - start_time
      target_elapsed = (data_sent + len(chunk)) * 8 / bits_per_sec
      if (cur_elapsed < target_elapsed):
        time.sleep(target_elapsed - cur_elapsed)
      outputfile.write(chunk)
      data_sent += len(chunk)
    self.server.listener.Log('Streamed %d bytes in %f s' %
                             (data_sent, time.time() - start_time))
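
  # Illustrative note (not part of the original file): with bandwidth set to
  # 1.0 (treated as 1.0 Mbps, i.e. 1,000,000 bits/sec), each 1500-byte chunk
  # accounts for 1500 * 8 / 1e6 = 0.012 seconds of simulated transfer time,
  # and the loop sleeps whenever it gets ahead of that schedule.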

  # Disable the built-in logging
  def log_message(self, format, *args):
    pass


# The ThreadingMixIn allows the server to handle multiple requests
# concurrently (or at least as concurrently as Python allows). This is
# desirable because server sockets only allow a limited "backlog" of pending
# connections and in the worst case the browser could make multiple
# connections and exceed this backlog, causing the server to drop requests.
# Using ThreadingMixIn helps reduce the chance this will happen.
# There were apparently some problems using this Mixin with Python 2.5, but we
# are no longer using anything older than 2.6.
class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):

  def Configure(
      self, file_mapping, redirect_mapping, extensions_mapping, allow_404,
      bandwidth, listener, serving_dirs=[], output_dir=None):
    self.file_mapping = file_mapping
    self.redirect_mapping = redirect_mapping
    self.extensions_mapping.update(extensions_mapping)
    self.allow_404 = allow_404
    self.bandwidth = bandwidth
    self.listener = listener
    self.rpc_lock = threading.Lock()
    self.serving_dirs = serving_dirs
    self.output_dir = output_dir

  def TestingBegun(self, timeout):
    self.test_in_progress = True
    # self.timeout does not affect Python 2.5.
    self.timeout = timeout
    self.ResetTimeout()
    self.JavaScriptIsAlive()
    # Have we seen any requests from the browser?
    self.received_request = False

  def ResetTimeout(self):
    self.last_activity = time.time()
    self.received_request = True

  def JavaScriptIsAlive(self):
    self.last_js_activity = time.time()

  def TimeSinceJSHeartbeat(self):
    return time.time() - self.last_js_activity

  def TestingEnded(self):
    self.test_in_progress = False

  def TimedOut(self, total_time):
    return (total_time >= 0.0 and
            (time.time() - self.last_activity) >= total_time)


def Create(host, port):
  server = Server((host, port), RequestHandler)
  server.extensions_mapping = mimetypes.types_map.copy()
  server.extensions_mapping.update({
      '': 'application/octet-stream' # Default
      })
  return server
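

# Illustrative usage sketch (not part of the original file). The real driver,
# browser_tester.py, supplies a richer listener and real file/redirect
# mappings; the stub listener and the literal values below are assumptions
# made only to show how Create/Configure/TestingBegun fit together.
if __name__ == '__main__':
  class StubListener(object):
    # Minimal listener: the handler calls Log() and ServerError() on it.
    def Log(self, message):
      print 'LOG:', message
    def ServerError(self, message):
      print 'ERROR:', message

  server = Create('localhost', 0)  # port 0 asks the OS for a free port
  server.Configure(file_mapping={},
                   redirect_mapping={},
                   extensions_mapping={},
                   allow_404=True,
                   bandwidth=0.0,  # <= 0.0 means no throttling
                   listener=StubListener(),
                   serving_dirs=['.'])
  server.TestingBegun(timeout=30.0)
  print 'Serving on port %d' % server.server_address[1]
  while not server.TimedOut(60.0):
    server.handle_request()  # serve one request, honoring self.timeout
  server.TestingEnded()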