# tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import time

from common import chrome_proxy_metrics
from common import network_metrics
from common.chrome_proxy_metrics import ChromeProxyMetricException
from telemetry.page import page_test
from telemetry.value import scalar

from metrics import Metric

class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True

  def SetEvents(self, events):
    """Used for unittest."""
    self._events = events

  def ResponseFromEvent(self, event):
    return chrome_proxy_metrics.ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
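    """Counts responses served via the proxy, from the cache, and directly."""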
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    if resources_from_cache + resources_via_proxy + resources_direct == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response, but zero responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
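    """Checks that every response has a valid proxy Via header and reports
    how many such responses were seen.
    """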
    via_count = 0

    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))

    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy, but zero such '
          'responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))

  def AddResultsForLatency(self, tab, results):
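    """Records navigation timing markers (relative to navigationStart) and
    phase durations from window.performance.timing, in milliseconds.
    """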
    # TODO(bustamante): This is a hack to workaround crbug.com/467174,
    # once fixed just pull down window.performance.timing object and
    # reference that everywhere.
    load_event_start = tab.EvaluateJavaScript(
        'window.performance.timing.loadEventStart')
    navigation_start = tab.EvaluateJavaScript(
        'window.performance.timing.navigationStart')
    dom_content_loaded_event_start = tab.EvaluateJavaScript(
        'window.performance.timing.domContentLoadedEventStart')
    fetch_start = tab.EvaluateJavaScript(
        'window.performance.timing.fetchStart')
    request_start = tab.EvaluateJavaScript(
        'window.performance.timing.requestStart')
    domain_lookup_end = tab.EvaluateJavaScript(
        'window.performance.timing.domainLookupEnd')
    domain_lookup_start = tab.EvaluateJavaScript(
        'window.performance.timing.domainLookupStart')
    connect_end = tab.EvaluateJavaScript(
        'window.performance.timing.connectEnd')
    connect_start = tab.EvaluateJavaScript(
        'window.performance.timing.connectStart')
    response_end = tab.EvaluateJavaScript(
        'window.performance.timing.responseEnd')
    response_start = tab.EvaluateJavaScript(
        'window.performance.timing.responseStart')

    # NavigationStart relative markers in milliseconds.
    load_start = (float(load_event_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'load_start', 'ms', load_start))

    dom_content_loaded_start = (
        float(dom_content_loaded_event_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'dom_content_loaded_start', 'ms',
        dom_content_loaded_start))

    fetch_start = (float(fetch_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'fetch_start', 'ms', fetch_start,
        important=False))

    request_start = (float(request_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_start', 'ms', request_start,
        important=False))

    response_start = (float(response_start) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'response_start', 'ms', response_start,
        important=False))

    response_end = (float(response_end) - navigation_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'response_end', 'ms', response_end,
        important=False))

    # Phase measurements in milliseconds.
    domain_lookup_duration = (float(domain_lookup_end) - domain_lookup_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'domain_lookup_duration', 'ms',
        domain_lookup_duration, important=False))

    connect_duration = (float(connect_end) - connect_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'connect_duration', 'ms', connect_duration,
        important=False))

    request_duration = (float(response_start) - request_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_duration', 'ms', request_duration,
        important=False))

    response_duration = (float(response_end) - response_start)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'response_duration', 'ms', response_duration,
        important=False))

  def AddResultsForExtraViaHeader(self, tab, results, extra_via_header):
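    """Checks that every proxied response also carries the given extra Via
    header and reports how many such responses were seen.
    """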
    extra_via_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        if resp.HasExtraViaHeader(extra_via_header):
          extra_via_count += 1
        else:
          raise ChromeProxyMetricException(
              '%s: Should have via header %s.' % (resp.response.url,
                                                  extra_via_header))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'extra_via_header', 'count', extra_via_count))

  def AddResultsForClientVersion(self, tab, results):
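    """Checks that every response is a 200 with a valid Via header and
    reports the number of responses that came through the proxy.
    """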
    via_count = 0
    for resp in self.IterResponses(tab):
      r = resp.response
      if resp.response.status != 200:
        raise ChromeProxyMetricException('%s: Response is not 200: %d' %
                                         (r.url, r.status))
      if not resp.IsValidByViaHeader():
        raise ChromeProxyMetricException('%s: Response missing via header' %
                                         (r.url))
      via_count += 1

    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy, but zero such '
          'responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'responses_via_proxy', 'count', via_count))

  def GetClientTypeFromRequests(self, tab):
    """Get the Chrome-Proxy client type value from requests made in this tab.

    Returns:
        The client type value from the first request made in this tab that
        specifies a client type in the Chrome-Proxy request header. See
        ChromeProxyResponse.GetChromeProxyClientType for more details about the
        Chrome-Proxy client type. Returns None if none of the requests made in
        this tab specify a client type.
    """
    for resp in self.IterResponses(tab):
      client_type = resp.GetChromeProxyClientType()
      if client_type:
        return client_type
    return None

  def AddResultsForClientType(self, tab, results, client_type,
                              bypass_for_client_type):
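    """Checks via/bypass behavior for the given client type: clients of type
    bypass_for_client_type must be bypassed, all other clients proxied.
    """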
    via_count = 0
    bypass_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        via_count += 1
        if client_type.lower() == bypass_for_client_type.lower():
          raise ChromeProxyMetricException(
              '%s: Response for client of type "%s" has via header, but should '
              'be bypassed.' % (resp.response.url, bypass_for_client_type))
      elif resp.ShouldHaveChromeProxyViaHeader():
        bypass_count += 1
        if client_type.lower() != bypass_for_client_type.lower():
          raise ChromeProxyMetricException(
              '%s: Response missing via header. Only "%s" clients should '
              'bypass for this page, but this client is "%s".' % (
                  resp.response.url, bypass_for_client_type, client_type))

    if via_count + bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response that was eligible to be proxied, but '
          'zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForLoFi(self, tab, results):
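    """Checks that every non-favicon response uses LoFi: the LoFi directive in
    both the request and response headers, and a content length of at most
    100 bytes.
    """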
    lo_fi_request_count = 0
    lo_fi_response_count = 0

    for resp in self.IterResponses(tab):
      if 'favicon.ico' in resp.response.url:
        continue

      if resp.HasChromeProxyLoFiRequest():
        lo_fi_request_count += 1
      else:
        raise ChromeProxyMetricException(
            '%s: LoFi not in request header.' % (resp.response.url))

      if resp.HasChromeProxyLoFiResponse():
        lo_fi_response_count += 1
      else:
        raise ChromeProxyMetricException(
            '%s: LoFi not in response header.' % (resp.response.url))

      if resp.content_length > 100:
        raise ChromeProxyMetricException(
            'Image %s is %d bytes. Expecting less than 100 bytes.' %
            (resp.response.url, resp.content_length))

    if lo_fi_request_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one LoFi request, but zero such requests were '
          'sent.')
    if lo_fi_response_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one LoFi response, but zero such responses were '
          'received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'lo_fi_request', 'count', lo_fi_request_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'lo_fi_response', 'count', lo_fi_response_count))
    super(ChromeProxyMetric, self).AddResults(tab, results)

  def AddResultsForPassThrough(self, tab, results):
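    """Checks that exactly one pass-through response and one compressed
    response were seen, and that the compressed image is smaller.
    """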
    compressed_count = 0
    compressed_size = 0
    pass_through_count = 0
    pass_through_size = 0

    for resp in self.IterResponses(tab):
      if 'favicon.ico' in resp.response.url:
        continue
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      if resp.HasChromeProxyPassThroughRequest():
        pass_through_count += 1
        pass_through_size = resp.content_length
      else:
        compressed_count += 1
        compressed_size = resp.content_length

    if pass_through_count != 1:
      raise ChromeProxyMetricException(
          'Expected exactly one Chrome-Proxy pass-through request, but %d '
          'such requests were sent.' % (pass_through_count))

    if compressed_count != 1:
      raise ChromeProxyMetricException(
          'Expected exactly one compressed request, but %d such requests were '
          'received.' % (compressed_count))

    if compressed_size >= pass_through_size:
      raise ChromeProxyMetricException(
          'Compressed image is %d bytes and pass-through image is %d. '
          'Expecting compressed image size to be less than pass-through '
          'image.' % (compressed_size, pass_through_size))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'compressed', 'count', compressed_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'compressed_size', 'bytes', compressed_size))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'pass_through', 'count', pass_through_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'pass_through_size', 'bytes', pass_through_size))

  def AddResultsForBypass(self, tab, results, url_pattern=""):
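    """Checks that responses matching url_pattern (or all responses, if no
    pattern is given) were bypassed, i.e. have no proxy Via header.
    """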
    bypass_count = 0
    skipped_count = 0

    for resp in self.IterResponses(tab):
      # Only check the URLs that contain the specified pattern.
      if url_pattern and url_pattern not in resp.response.url:
        skipped_count += 1
        continue

      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response to be bypassed, but zero such '
          'responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'skipped', 'count', skipped_count))

  def AddResultsForCorsBypass(self, tab, results):
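    """Checks CORS bypass behavior: at least one eligible response must be
    bypassed, and each 502 from the proxy must be followed by exactly one
    direct 200 for the same URL.
    """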
    eligible_response_count = 0
    bypass_count = 0
    bypasses = {}
    for resp in self.IterResponses(tab):
      logging.warn('got a resource %s' % (resp.response.url))

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1
        elif resp.response.status == 502:
          bypasses[resp.response.url] = 0

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        if not resp.HasChromeProxyViaHeader():
          if resp.response.status == 200:
            if resp.response.url in bypasses:
              bypasses[resp.response.url] = bypasses[resp.response.url] + 1

    for url in bypasses:
      if bypasses[url] == 0:
        raise ChromeProxyMetricException(
            '%s: Got a 502 without a subsequent 200' % (url))
      elif bypasses[url] > 1:
        raise ChromeProxyMetricException(
            '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'At least one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cors_bypass', 'count', bypass_count))

  def AddResultsForBlockOnce(self, tab, results):
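    """Checks that the two block-once test URLs are fetched without the proxy
    Via header while all other eligible responses still go through the proxy.
    """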
    eligible_response_count = 0
    via_proxy = 0

    for resp in self.IterResponses(tab):
      # Block-once test URLs (Data Reduction Proxy always returns
      # block-once) should not have the Chrome-Compression-Proxy Via header.
      if IsTestUrlForBlockOnce(resp.response.url):
        eligible_response_count += 1
        if resp.HasChromeProxyViaHeader():
          raise ChromeProxyMetricException(
              'Response has a Chrome-Compression-Proxy Via header: ' +
              resp.response.url)
      elif resp.ShouldHaveChromeProxyViaHeader():
        via_proxy += 1
        if not resp.HasChromeProxyViaHeader():
          # For all other URLs, confirm that via header is present if expected.
          raise ChromeProxyMetricException(
              'Missing Chrome-Compression-Proxy Via header: ' +
              resp.response.url)

    if via_proxy == 0:
      raise ChromeProxyMetricException(
          'None of the requests went via data reduction proxy')

    if eligible_response_count != 2:
      raise ChromeProxyMetricException(
          'Did not make expected number of requests to whitelisted block-once'
          ' test URLs. Expected: 2, Actual: ' + str(eligible_response_count))

    results.AddValue(scalar.ScalarValue(results.current_page,
        'BlockOnce_success', 'num_eligible_response', 2))

  def AddResultsForSafebrowsingOn(self, tab, results):
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'safebrowsing', 'timeout responses', 1))

  def AddResultsForSafebrowsingOff(self, tab, results):
    response_count = 0
    for resp in self.IterResponses(tab):
      # Data reduction proxy should return the real response for sites with
      # malware.
      response_count += 1
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Safebrowsing feature should be off for desktop and webview.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))

    if response_count == 0:
      raise ChromeProxyMetricException(
          'Safebrowsing test failed: No valid responses received')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'safebrowsing', 'responses', response_count))

  def AddResultsForHTTPFallback(self, tab, results):
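    """Checks that every eligible response came through the HTTP fallback
    proxy: the Via header is present and, when a remote port is reported, it
    is port 80.
    """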
    via_fallback_count = 0

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        # All responses should have come through the HTTP fallback proxy, which
        # means that they should have the via header, and if a remote port is
        # defined, it should be port 80.
        if (not resp.HasChromeProxyViaHeader() or
            (resp.remote_port and resp.remote_port != 80)):
          r = resp.response
          raise ChromeProxyMetricException(
              '%s: Should have come through the fallback proxy.\n'
              'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
                  r.url, str(resp.remote_port), r.status, r.status_text,
                  r.headers))
        via_fallback_count += 1

    if via_fallback_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the fallback proxy, but zero '
          'such responses were received.')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via_fallback', 'count', via_fallback_count))

  def AddResultsForHTTPToDirectFallback(self, tab, results,
                                        fallback_response_host):
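    """Checks that the leading responses from fallback_response_host came
    through the HTTP fallback proxy on port 80 and that all responses after
    them were bypassed.
    """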
    via_fallback_count = 0
    bypass_count = 0
    responses = self.IterResponses(tab)

    # The first response(s) coming from fallback_response_host should be
    # through the HTTP fallback proxy.
    resp = next(responses, None)
    while resp and fallback_response_host in resp.response.url:
      if fallback_response_host in resp.response.url:
        if (not resp.HasChromeProxyViaHeader() or resp.remote_port != 80):
          r = resp.response
          raise ChromeProxyMetricException(
              'Response for %s should have come through the fallback proxy.\n'
              'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
                  r.url, str(resp.remote_port), r.status, r.status_text,
                  r.headers))
        else:
          via_fallback_count += 1
      resp = next(responses, None)

    # All other responses should be bypassed.
    while resp:
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1
      resp = next(responses, None)

    # At least one response should go through the http proxy and be bypassed.
    if via_fallback_count == 0 or bypass_count == 0:
      raise ChromeProxyMetricException(
          'There should be at least one response through the fallback proxy '
          '(actual %s) and at least one bypassed response (actual %s)' %
          (via_fallback_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via_fallback', 'count', via_fallback_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForReenableAfterBypass(
      self, tab, results, bypass_seconds_min, bypass_seconds_max):
    """Verify results for a re-enable after bypass test.

    Args:
      tab: the tab for the test.
      results: the results object to add the results values to.
      bypass_seconds_min: the minimum duration of the bypass.
      bypass_seconds_max: the maximum duration of the bypass.
    """
    bypass_count = 0
    via_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1

    # Wait until 30 seconds before the bypass should expire, and fetch a page.
    # It should not have the via header because the proxy should still be
    # bypassed.
    time.sleep(bypass_seconds_min - 30)

    tab.ClearCache(force=True)
    before_metrics = ChromeProxyMetric()
    before_metrics.Start(results.current_page, tab)
    tab.Navigate('http://chromeproxy-test.appspot.com/default')
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
    before_metrics.Stop(results.current_page, tab)

    for resp in before_metrics.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should not have via header; proxy should still '
            'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        bypass_count += 1
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response to be bypassed before the bypass '
          'expired, but zero such responses were received.')

    # Wait until 30 seconds after the bypass should expire, and fetch a page. It
    # should have the via header since the proxy should no longer be bypassed.
    time.sleep((bypass_seconds_max + 30) - (bypass_seconds_min - 30))

    tab.ClearCache(force=True)
    after_metrics = ChromeProxyMetric()
    after_metrics.Start(results.current_page, tab)
    tab.Navigate('http://chromeproxy-test.appspot.com/default')
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
    after_metrics.Stop(results.current_page, tab)

    for resp in after_metrics.IterResponses(tab):
      if not resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            'Response for %s should have via header; proxy should no longer '
            'be bypassed.\nResponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
      else:
        via_count += 1
    if via_count == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response through the proxy after the bypass '
          'expired, but zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via', 'count', via_count))

  def AddResultsForClientConfig(self, tab, results):
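    """Checks that requests carry the new authentication value ('s' in the
    Chrome-Proxy request header) and that none carry the old 'ps' value.
    """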
    resources_with_old_auth = 0
    resources_with_new_auth = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.GetChromeProxyRequestHeaderValue('s') != None:
        resources_with_new_auth += 1
      if resp.GetChromeProxyRequestHeaderValue('ps') != None:
        resources_with_old_auth += 1

    if resources_with_old_auth != 0:
      raise ChromeProxyMetricException(
          'Expected zero responses with the old authentication scheme but '
          'received %d.' % resources_with_old_auth)

    if resources_with_new_auth == 0:
      raise ChromeProxyMetricException(
          'Expected at least one response with the new authentication scheme, '
          'but zero such responses were received.')

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'new_auth', 'count', resources_with_new_auth))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'old_auth', 'count', resources_with_old_auth))

PROXIED = 'proxied'
DIRECT = 'direct'

class ChromeProxyVideoMetric(network_metrics.NetworkMetric):
  """Metrics for video pages.

  Wraps the video metrics produced by videowrapper.js, such as the video
  duration and size in pixels. Also checks a few basic HTTP response headers
  such as Content-Type and Content-Length in the video responses.
  """

  def __init__(self, tab):
    super(ChromeProxyVideoMetric, self).__init__()
    with open(os.path.join(os.path.dirname(__file__), 'videowrapper.js')) as f:
      js = f.read()
      tab.ExecuteJavaScript(js)

  def Start(self, page, tab):
    tab.ExecuteJavaScript('window.__chromeProxyCreateVideoWrappers()')
    self.videoMetrics = None
    super(ChromeProxyVideoMetric, self).Start(page, tab)

  def Stop(self, page, tab):
    tab.WaitForJavaScriptExpression('window.__chromeProxyVideoLoaded', 30)
    m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')

    # Now wait for the video to stop playing.
    # Give it 2x the total duration to account for buffering.
    waitTime = 2 * m['video_duration']
    tab.WaitForJavaScriptExpression('window.__chromeProxyVideoEnded', waitTime)

    # Load the final metrics.
    m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
    self.videoMetrics = m
    # Cast this to an integer as it is often approximate (for an unknown reason)
    m['video_duration'] = int(m['video_duration'])
    super(ChromeProxyVideoMetric, self).Stop(page, tab)

  def ResponseFromEvent(self, event):
    return chrome_proxy_metrics.ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    raise NotImplementedError

  def AddResultsForProxied(self, tab, results):
    return self._AddResultsShared(PROXIED, tab, results)

  def AddResultsForDirect(self, tab, results):
    return self._AddResultsShared(DIRECT, tab, results)

  def _AddResultsShared(self, kind, tab, results):
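    """Verifies the video response for the given kind (PROXIED or DIRECT) and
    adds the collected video metrics, suffixed with the kind, to the results.
    """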
    def err(s):
      raise ChromeProxyMetricException(s)

    # Should have played the video.
    if not self.videoMetrics['ready']:
      err('%s: video not played' % kind)

    # Should have an HTTP response for the video.
    wantContentType = 'video/webm' if kind == PROXIED else 'video/mp4'
    found = False
    for r in self.IterResponses(tab):
      resp = r.response
      if kind == DIRECT and r.HasChromeProxyViaHeader():
        err('%s: page has proxied Via header' % kind)
      if resp.GetHeader('Content-Type') != wantContentType:
        continue
      if found:
        err('%s: multiple video responses' % kind)
      found = True

      cl = resp.GetHeader('Content-Length')
      xocl = resp.GetHeader('X-Original-Content-Length')
      if cl != None:
        self.videoMetrics['content_length_header'] = int(cl)
      if xocl != None:
        self.videoMetrics['x_original_content_length_header'] = int(xocl)

      # Should have CL always.
      if cl == None:
        err('%s: missing ContentLength' % kind)
      # Proxied: should have CL < XOCL
      # Direct: should not have XOCL
      if kind == PROXIED:
        if xocl == None or int(cl) >= int(xocl):
          err('%s: bigger response (%s > %s)' % (kind, str(cl), str(xocl)))
      else:
        if xocl != None:
          err('%s: has XOriginalContentLength' % kind)

    if not found:
      err('%s: missing video response' % kind)

    # Finally, add all the metrics to the results.
    for (k, v) in self.videoMetrics.iteritems():
      k = "%s_%s" % (k, kind)
      results.AddValue(scalar.ScalarValue(results.current_page, k, "", v))

class ChromeProxyInstrumentedVideoMetric(Metric):
  """Metric for pages instrumented to evaluate video transcoding."""
  def __init__(self):
    super(ChromeProxyInstrumentedVideoMetric, self).__init__()

  def Stop(self, page, tab):
    waitTime = tab.EvaluateJavaScript('test.waitTime')
    tab.WaitForJavaScriptExpression('test.metrics.complete', waitTime)
    super(ChromeProxyInstrumentedVideoMetric, self).Stop(page, tab)

  def AddResults(self, tab, results):
    metrics = tab.EvaluateJavaScript('test.metrics')
    for (k, v) in metrics.iteritems():
      results.AddValue(scalar.ScalarValue(results.current_page, k, '', v))
    try:
      complete = metrics['complete']
      failed = metrics['failed']
      if not complete:
        raise ChromeProxyMetricException('Test not complete')
      if failed:
        raise ChromeProxyMetricException('failed')
    except KeyError:
      raise ChromeProxyMetricException('No metrics found')

# Returns whether |url| is a block-once test URL. Data Reduction Proxy has been
# configured to always return block-once for these URLs.
def IsTestUrlForBlockOnce(url):
  return (url == 'http://check.googlezip.net/blocksingle/' or
          url == 'http://chromeproxy-test.appspot.com/default?respBody=T0s=&respStatus=200&flywheelAction=block-once')