# tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import time

from common import chrome_proxy_metrics
from common import network_metrics
from common.chrome_proxy_metrics import ChromeProxyMetricException
from telemetry.page import page_test
from telemetry.value import scalar
from metrics import Metric

class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True

  def SetEvents(self, events):
    """Used for unittest."""
    self._events = events

  def ResponseFromEvent(self, event):
    return chrome_proxy_metrics.ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    if resources_from_cache + resources_via_proxy + resources_direct == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response, but zero responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
    via_count = 0

    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))

    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy, but zero such '
          'responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))

  def AddResultsForLatency(self, tab, results):
    # TODO(bustamante): This is a hack to work around crbug.com/467174.
    # Once that's fixed, just pull down the window.performance.timing
    # object and reference it everywhere.
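    # A rough sketch of that future shape (hypothetical, assuming the whole
    # timing object can be serialized in one EvaluateJavaScript call):
    #   timing = tab.EvaluateJavaScript(
    #       'JSON.parse(JSON.stringify(window.performance.timing))')
    #   navigation_start = timing['navigationStart']
    #   load_event_start = timing['loadEventStart']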
    load_event_start = tab.EvaluateJavaScript(
        'window.performance.timing.loadEventStart')
    navigation_start = tab.EvaluateJavaScript(
        'window.performance.timing.navigationStart')
    dom_content_loaded_event_start = tab.EvaluateJavaScript(
        'window.performance.timing.domContentLoadedEventStart')
    fetch_start = tab.EvaluateJavaScript(
        'window.performance.timing.fetchStart')
    request_start = tab.EvaluateJavaScript(
        'window.performance.timing.requestStart')
    domain_lookup_end = tab.EvaluateJavaScript(
        'window.performance.timing.domainLookupEnd')
    domain_lookup_start = tab.EvaluateJavaScript(
        'window.performance.timing.domainLookupStart')
    connect_end = tab.EvaluateJavaScript(
        'window.performance.timing.connectEnd')
    connect_start = tab.EvaluateJavaScript(
        'window.performance.timing.connectStart')
    response_end = tab.EvaluateJavaScript(
        'window.performance.timing.responseEnd')
    response_start = tab.EvaluateJavaScript(
        'window.performance.timing.responseStart')

    # NavigationStart relative markers in milliseconds.
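    # For orientation (per the Navigation Timing processing model, not
    # asserted here): navigationStart <= fetchStart <= domainLookupStart <=
    # domainLookupEnd <= connectStart <= connectEnd <= requestStart <=
    # responseStart <= responseEnd, and domContentLoadedEventStart precedes
    # loadEventStart.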
    load_start = (float(load_event_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'load_start', 'ms', load_start))

    dom_content_loaded_start = (
        float(dom_content_loaded_event_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'dom_content_loaded_start', 'ms',
        dom_content_loaded_start))

    fetch_start = (float(fetch_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'fetch_start', 'ms', fetch_start,
        important=False))

    request_start = (float(request_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_start', 'ms', request_start,
        important=False))

    # Phase measurements in milliseconds.
    domain_lookup_duration = (float(domain_lookup_end) - domain_lookup_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'domain_lookup_duration', 'ms',
        domain_lookup_duration, important=False))

    connect_duration = (float(connect_end) - connect_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'connect_duration', 'ms', connect_duration,
        important=False))

    request_duration = (float(response_start) - request_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_duration', 'ms', request_duration,
        important=False))

    response_duration = (float(response_end) - response_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'response_duration', 'ms', response_duration,
        important=False))

  def AddResultsForExtraViaHeader(self, tab, results, extra_via_header):
    extra_via_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        if resp.HasExtraViaHeader(extra_via_header):
          extra_via_count += 1
        else:
          raise ChromeProxyMetricException(
              '%s: Should have via header %s.' %
              (resp.response.url, extra_via_header))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'extra_via_header', 'count', extra_via_count))

  def AddResultsForClientVersion(self, tab, results):
    via_count = 0
    for resp in self.IterResponses(tab):
      r = resp.response
      if resp.response.status != 200:
        raise ChromeProxyMetricException(
            '%s: Response is not 200: %d' % (r.url, r.status))
      if not resp.IsValidByViaHeader():
        raise ChromeProxyMetricException(
            '%s: Response missing via header' % (r.url))
      via_count += 1

    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy, but zero such '
          'responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'responses_via_proxy', 'count', via_count))

  def GetClientTypeFromRequests(self, tab):
    """Get the Chrome-Proxy client type value from requests made in this tab.

    Returns:
      The client type value from the first request made in this tab that
      specifies a client type in the Chrome-Proxy request header. See
      ChromeProxyResponse.GetChromeProxyClientType for more details about the
      Chrome-Proxy client type. Returns None if none of the requests made in
      this tab specify a client type.
    """
    for resp in self.IterResponses(tab):
      client_type = resp.GetChromeProxyClientType()
      if client_type:
        return client_type
    return None
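  # Illustrative only (the authoritative parsing lives in
  # ChromeProxyResponse.GetChromeProxyClientType in the common helpers): a
  # request carrying a header along the lines of
  #   Chrome-Proxy: ps=..., sid=..., c=android
  # would be reported as client type 'android'. The 'c=' directive name is an
  # assumption here, not something this file checks directly.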

  def AddResultsForClientType(self, tab, results, client_type,
                              bypass_for_client_type):
    via_count = 0
    bypass_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        via_count += 1
        if client_type.lower() == bypass_for_client_type.lower():
          raise ChromeProxyMetricException(
              '%s: Response for client of type "%s" has via header, but '
              'should be bypassed.' % (
                  resp.response.url, bypass_for_client_type))
      elif resp.ShouldHaveChromeProxyViaHeader():
        bypass_count += 1
        if client_type.lower() != bypass_for_client_type.lower():
          raise ChromeProxyMetricException(
              '%s: Response missing via header. Only "%s" clients should '
              'bypass for this page, but this client is "%s".' % (
                  resp.response.url, bypass_for_client_type, client_type))

    if via_count + bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response that was eligible to be proxied, '
          'but zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForLoFi(self, tab, results):
    lo_fi_request_count = 0
    lo_fi_response_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyLoFiRequest():
        lo_fi_request_count += 1
      else:
        raise ChromeProxyMetricException(
            '%s: LoFi not in request header.' % (resp.response.url))

      if resp.HasChromeProxyLoFiResponse():
        lo_fi_response_count += 1
      else:
        raise ChromeProxyMetricException(
            '%s: LoFi not in response header.' % (resp.response.url))

      if resp.content_length > 100:
        raise ChromeProxyMetricException(
            'Image %s is %d bytes. Expecting less than 100 bytes.' %
            (resp.response.url, resp.content_length))

    if lo_fi_request_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one LoFi request, but zero such requests were '
          'sent.')
    if lo_fi_response_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one LoFi response, but zero such responses were '
          'received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'lo_fi_request', 'count', lo_fi_request_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'lo_fi_response', 'count', lo_fi_response_count))
    super(ChromeProxyMetric, self).AddResults(tab, results)

  def AddResultsForPassThrough(self, tab, results):
    compressed_count = 0
    compressed_size = 0
    pass_through_count = 0
    pass_through_size = 0

    for resp in self.IterResponses(tab):
      if 'favicon.ico' in resp.response.url:
        continue
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      if resp.HasChromeProxyPassThroughRequest():
        pass_through_count += 1
        pass_through_size = resp.content_length
      else:
        compressed_count += 1
        compressed_size = resp.content_length

    if pass_through_count != 1:
      raise ChromeProxyMetricException(
          'Expected exactly one Chrome-Proxy pass-through request, but %d '
          'such requests were sent.' % (pass_through_count))

    if compressed_count != 1:
      raise ChromeProxyMetricException(
          'Expected exactly one compressed request, but %d such requests were '
          'received.' % (compressed_count))

    if compressed_size >= pass_through_size:
      raise ChromeProxyMetricException(
          'Compressed image is %d bytes and pass-through image is %d. '
          'Expecting compressed image size to be less than pass-through '
          'image.' % (compressed_size, pass_through_size))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'compressed', 'count', compressed_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'compressed_size', 'bytes', compressed_size))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'pass_through', 'count', pass_through_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'pass_through_size', 'bytes', pass_through_size))

  def AddResultsForBypass(self, tab, results, url_pattern=""):
    bypass_count = 0
    skipped_count = 0

    for resp in self.IterResponses(tab):
      # Only check the URLs that contain the specified pattern.
      if url_pattern and url_pattern not in resp.response.url:
        skipped_count += 1
        continue

      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response to be bypassed, but zero such '
          'responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'skipped', 'count', skipped_count))

  def AddResultsForCorsBypass(self, tab, results):
    eligible_response_count = 0
    bypass_count = 0
    bypasses = {}
    for resp in self.IterResponses(tab):
      logging.warn('got a resource %s' % (resp.response.url))
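    # The checks below encode the expected CORS bypass sequence, as their
    # error messages imply: an eligible cross-origin resource is first
    # answered by the proxy with a 502, which triggers a bypass, and a direct
    # retry of the same URL should then succeed with exactly one 200.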

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1
        elif resp.response.status == 502:
          bypasses[resp.response.url] = 0

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        if not resp.HasChromeProxyViaHeader():
          if resp.response.status == 200:
            if resp.response.url in bypasses:
              bypasses[resp.response.url] += 1

    for url in bypasses:
      if bypasses[url] == 0:
        raise ChromeProxyMetricException(
            '%s: Got a 502 without a subsequent 200' % (url))
      elif bypasses[url] > 1:
        raise ChromeProxyMetricException(
            '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'At least one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cors_bypass', 'count', bypass_count))

  def AddResultsForBlockOnce(self, tab, results):
    eligible_response_count = 0
    via_proxy = 0

    for resp in self.IterResponses(tab):
      # Block-once test URLs (Data Reduction Proxy always returns
      # block-once) should not have the Chrome-Compression-Proxy Via header.
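      # For reference (not asserted verbatim here), the proxy's Via header
      # value is typically of the form "1.1 Chrome-Compression-Proxy", which
      # is what HasChromeProxyViaHeader() looks for.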
      if IsTestUrlForBlockOnce(resp.response.url):
        eligible_response_count += 1
        if resp.HasChromeProxyViaHeader():
          raise ChromeProxyMetricException(
              'Response has a Chrome-Compression-Proxy Via header: ' +
              resp.response.url)
      elif resp.ShouldHaveChromeProxyViaHeader():
        via_proxy += 1
        if not resp.HasChromeProxyViaHeader():
          # For all other URLs, confirm that via header is present if expected.
          raise ChromeProxyMetricException(
              'Missing Chrome-Compression-Proxy Via header: ' +
              resp.response.url)

    if via_proxy == 0:
      raise ChromeProxyMetricException(
          'None of the requests went via data reduction proxy')

    if eligible_response_count != 2:
      raise ChromeProxyMetricException(
          'Did not make expected number of requests to whitelisted block-once'
          ' test URLs. Expected: 2, Actual: ' + str(eligible_response_count))

    results.AddValue(scalar.ScalarValue(results.current_page,
        'BlockOnce_success', 'num_eligible_response', 2))

  def AddResultsForSafebrowsingOn(self, tab, results):
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'safebrowsing', 'timeout responses', 1))

  def AddResultsForSafebrowsingOff(self, tab, results):
    response_count = 0
    for resp in self.IterResponses(tab):
      # Data reduction proxy should return the real response for sites with
      # malware.
      response_count += 1
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Safebrowsing feature should be off for desktop and webview.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))

    if response_count == 0:
      raise ChromeProxyMetricException(
          'Safebrowsing test failed: No valid responses received')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'safebrowsing', 'responses', response_count))

  def AddResultsForHTTPFallback(self, tab, results):
    via_fallback_count = 0

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        # All responses should have come through the HTTP fallback proxy,
        # which means that they should have the via header, and if a remote
        # port is defined, it should be port 80.
        if (not resp.HasChromeProxyViaHeader() or
            (resp.remote_port and resp.remote_port != 80)):
          r = resp.response
          raise ChromeProxyMetricException(
              '%s: Should have come through the fallback proxy.\n'
              'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
                  r.url, str(resp.remote_port), r.status, r.status_text,
                  r.headers))
        via_fallback_count += 1

    if via_fallback_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the fallback proxy, but '
          'zero such responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via_fallback', 'count', via_fallback_count))

  def AddResultsForHTTPToDirectFallback(self, tab, results,
                                        fallback_response_host):
    via_fallback_count = 0
    bypass_count = 0
    responses = self.IterResponses(tab)

    # The first response(s) coming from fallback_response_host should be
    # through the HTTP fallback proxy.
    resp = next(responses, None)
    while resp and fallback_response_host in resp.response.url:
      if fallback_response_host in resp.response.url:
        if (not resp.HasChromeProxyViaHeader() or resp.remote_port != 80):
          r = resp.response
          raise ChromeProxyMetricException(
              'Response for %s should have come through the fallback proxy.\n'
              'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
                  r.url, str(resp.remote_port), r.status, r.status_text,
                  r.headers))
        else:
          via_fallback_count += 1
      resp = next(responses, None)

    # All other responses should be bypassed.
    while resp:
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1
      resp = next(responses, None)

    # At least one response should go through the http proxy and be bypassed.
    if via_fallback_count == 0 or bypass_count == 0:
      raise ChromeProxyMetricException(
          'There should be at least one response through the fallback proxy '
          '(actual %s) and at least one bypassed response (actual %s)' %
          (via_fallback_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via_fallback', 'count', via_fallback_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForReenableAfterBypass(
      self, tab, results, bypass_seconds_min, bypass_seconds_max):
    """Verify results for a re-enable after bypass test.

    Args:
      tab: the tab for the test.
      results: the results object to add the results values to.
      bypass_seconds_min: the minimum duration of the bypass.
      bypass_seconds_max: the maximum duration of the bypass.
    """
    bypass_count = 0
    via_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1

    # Wait until 30 seconds before the bypass should expire, and fetch a page.
    # It should not have the via header because the proxy should still be
    # bypassed.
    time.sleep(bypass_seconds_min - 30)

    tab.ClearCache(force=True)
    before_metrics = ChromeProxyMetric()
    before_metrics.Start(results.current_page, tab)
    tab.Navigate('http://chromeproxy-test.appspot.com/default')
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
    before_metrics.Stop(results.current_page, tab)

    for resp in before_metrics.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header; proxy should still '
            'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response to be bypassed before the bypass '
          'expired, but zero such responses were received.')

    # Wait until 30 seconds after the bypass should expire, and fetch a page.
    # It should have the via header since the proxy should no longer be
    # bypassed.
    time.sleep((bypass_seconds_max + 30) - (bypass_seconds_min - 30))
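    # Timeline sketch with made-up numbers: if bypass_seconds_min=600 and
    # bypass_seconds_max=900, the first re-check above happened around T+570s
    # (still bypassed), and this second wait of (900 + 30) - (600 - 30) = 360s
    # places the next fetch around T+930s, safely after the bypass expired.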

    tab.ClearCache(force=True)
    after_metrics = ChromeProxyMetric()
    after_metrics.Start(results.current_page, tab)
    tab.Navigate('http://chromeproxy-test.appspot.com/default')
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
    after_metrics.Stop(results.current_page, tab)

    for resp in after_metrics.IterResponses(tab):
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should have via header; proxy should no longer '
            'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        via_count += 1
    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy after the bypass '
          'expired, but zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via', 'count', via_count))

  def AddResultsForClientConfig(self, tab, results):
    resources_with_old_auth = 0
    resources_with_new_auth = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.GetChromeProxyRequestHeaderValue('s') != None:
        resources_with_new_auth += 1
      if resp.GetChromeProxyRequestHeaderValue('ps') != None:
        resources_with_old_auth += 1

    if resources_with_old_auth != 0:
      raise ChromeProxyMetricException(
          'Expected zero responses with the old authentication scheme but '
          'received %d.' % resources_with_old_auth)

    if resources_with_new_auth == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response with the new authentication scheme, '
          'but zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'new_auth', 'count', resources_with_new_auth))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'old_auth', 'count', resources_with_old_auth))

PROXIED = 'proxied'
DIRECT = 'direct'

class ChromeProxyVideoMetric(network_metrics.NetworkMetric):
  """Metrics for video pages.

  Wraps the video metrics produced by videowrapper.js, such as the video
  duration and size in pixels. Also checks a few basic HTTP response headers
  such as Content-Type and Content-Length in the video responses.
  """
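  # Reader aid (not from the original comments): the dict produced by
  # videowrapper.js is mostly passed through untouched; the only keys this
  # class relies on directly are 'ready' (the video actually played) and
  # 'video_duration' (used as a wait budget, presumably in seconds).
  # _AddResultsShared() later adds 'content_length_header' and
  # 'x_original_content_length_header' from the video response headers.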

  def __init__(self, tab):
    super(ChromeProxyVideoMetric, self).__init__()
    with open(os.path.join(os.path.dirname(__file__), 'videowrapper.js')) as f:
      js = f.read()
      tab.ExecuteJavaScript(js)

  def Start(self, page, tab):
    tab.ExecuteJavaScript('window.__chromeProxyCreateVideoWrappers()')
    self.videoMetrics = None
    super(ChromeProxyVideoMetric, self).Start(page, tab)

  def Stop(self, page, tab):
    tab.WaitForJavaScriptExpression('window.__chromeProxyVideoLoaded', 30)
    m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')

    # Now wait for the video to stop playing.
    # Give it 2x the total duration to account for buffering.
    waitTime = 2 * m['video_duration']
    tab.WaitForJavaScriptExpression('window.__chromeProxyVideoEnded', waitTime)

    # Load the final metrics.
    m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
    self.videoMetrics = m
    # Cast this to an integer as it is often approximate (for an unknown
    # reason).
    m['video_duration'] = int(m['video_duration'])
    super(ChromeProxyVideoMetric, self).Stop(page, tab)

  def ResponseFromEvent(self, event):
    return chrome_proxy_metrics.ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    raise NotImplementedError

  def AddResultsForProxied(self, tab, results):
    return self._AddResultsShared(PROXIED, tab, results)

  def AddResultsForDirect(self, tab, results):
    return self._AddResultsShared(DIRECT, tab, results)

  def _AddResultsShared(self, kind, tab, results):
    def err(s):
      raise ChromeProxyMetricException(s)

    # Should have played the video.
    if not self.videoMetrics['ready']:
      err('%s: video not played' % kind)

    # Should have an HTTP response for the video.
    wantContentType = 'video/webm' if kind == PROXIED else 'video/mp4'
    found = False
    for r in self.IterResponses(tab):
      resp = r.response
      if kind == DIRECT and r.HasChromeProxyViaHeader():
        err('%s: page has proxied Via header' % kind)
      if resp.GetHeader('Content-Type') != wantContentType:
        continue
      if found:
        err('%s: multiple video responses' % kind)
      found = True

      cl = resp.GetHeader('Content-Length')
      xocl = resp.GetHeader('X-Original-Content-Length')
      if cl != None:
        self.videoMetrics['content_length_header'] = int(cl)
      if xocl != None:
        self.videoMetrics['x_original_content_length_header'] = int(xocl)

      # Should have CL always.
      if cl == None:
        err('%s: missing ContentLength' % kind)
      # Proxied: should have CL < XOCL
      # Direct: should not have XOCL
      if kind == PROXIED:
        if xocl == None or int(cl) >= int(xocl):
          err('%s: bigger response (%s > %s)' % (kind, str(cl), str(xocl)))
      else:
        if xocl != None:
          err('%s: has XOriginalContentLength' % kind)
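      # Illustrative numbers (made up, not from the test data): a proxied
      # response with Content-Length: 400000 and X-Original-Content-Length:
      # 950000 passes the check above, while a direct response must not carry
      # X-Original-Content-Length at all.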

    if not found:
      err('%s: missing video response' % kind)

    # Finally, add all the metrics to the results.
    for (k, v) in self.videoMetrics.iteritems():
      k = "%s_%s" % (k, kind)
      results.AddValue(scalar.ScalarValue(results.current_page, k, "", v))

class ChromeProxyInstrumentedVideoMetric(Metric):
  """Metric for pages instrumented to evaluate video transcoding."""
  def __init__(self):
    super(ChromeProxyInstrumentedVideoMetric, self).__init__()

  def Stop(self, page, tab):
    waitTime = tab.EvaluateJavaScript('test.waitTime')
    tab.WaitForJavaScriptExpression('test.metrics.complete', waitTime)
    super(ChromeProxyInstrumentedVideoMetric, self).Stop(page, tab)

  def AddResults(self, tab, results):
    metrics = tab.EvaluateJavaScript('test.metrics')
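    # Expected shape (inferred from the checks below): a dict that contains
    # at least boolean 'complete' and 'failed' entries; every key/value pair
    # present is also reported as a scalar result as-is.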
    for (k, v) in metrics.iteritems():
      results.AddValue(scalar.ScalarValue(results.current_page, k, '', v))
    try:
      complete = metrics['complete']
      failed = metrics['failed']
      if not complete:
        raise ChromeProxyMetricException('Test not complete')
      if failed:
        raise ChromeProxyMetricException('failed')
    except KeyError:
      raise ChromeProxyMetricException('No metrics found')

# Returns whether |url| is a block-once test URL. Data Reduction Proxy has been
# configured to always return block-once for these URLs.
def IsTestUrlForBlockOnce(url):
  return (url == 'http://check.googlezip.net/blocksingle/' or
          url == 'http://chromeproxy-test.appspot.com/default?respBody=T0s=&respStatus=200&flywheelAction=block-once')
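# Example usage (illustrative; example.com is an arbitrary non-test URL):
#   IsTestUrlForBlockOnce('http://check.googlezip.net/blocksingle/')  # True
#   IsTestUrlForBlockOnce('http://example.com/')                      # False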