Chromecast: extracts Linux window creation code to a common place.
[chromium-blink-merge.git] / tools / chrome_proxy / integration_tests / chrome_proxy_metrics.py
blobadcc24119d44203115f6ac398875be9ccb145e19
1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
5 import datetime
6 import logging
7 import os
9 from integration_tests import network_metrics
10 from telemetry.page import page_test
11 from telemetry.value import scalar
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a Chrome proxy integration-test expectation is violated."""
  pass
# Via header values inserted by the Chrome data compression proxy.
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy endpoints, as reported by chrome://net-internals#proxy.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_DEV_SETTING_HTTP = 'proxy-dev.googlezip.net:80'
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Returns the proxy-state dict scraped from the net-internals page.

  Navigates |tab| to |url|, injects chrome_proxy_metrics.js, and evaluates
  window.__getChromeProxyInfo() to collect the browser's proxy information.
  """
  tab.Navigate(url)
  script_path = os.path.join(os.path.dirname(__file__),
                             'chrome_proxy_metrics.js')
  with open(script_path) as script_file:
    script = script_file.read()
  tab.ExecuteJavaScript(script)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  return tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Returns True if |retry_time| is inside [low, high] widened by a grace.

  The window is extended by |grace_seconds| on both ends; the lower bound is
  inclusive and the upper bound is exclusive.
  """
  grace = datetime.timedelta(seconds=grace_seconds)
  if retry_time < low - grace:
    return False
  return retry_time < high + grace
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
    """Returns True if this response is expected to carry the proxy Via header.

    HTTPS and data: URLs, 304 Not Modified responses, cache hits, and
    responses without any headers are exempt from the Via-header check.
    """
    resp = self.response
    # Ignore https and data url
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hit.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any header. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not have any headers '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
    """Returns True if the Via header names the Chrome compression proxy."""
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    vias = [v.strip(' ') for v in via_header.split(',')]
    # The Via header is valid if it is the old format or the new format
    # with 4-character version prefix, for example,
    # "1.1 Chrome-Compression-Proxy".
    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))

  def IsValidByViaHeader(self):
    """Returns True unless a required proxy Via header is missing."""
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def IsSafebrowsingResponse(self):
    """Returns True for a 307 that redirects a malware-flagged URL to itself."""
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
      return True
    return False
class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True
    # Proxy endpoints as chrome://net-internals reports them.
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "proxy-dev": PROXY_DEV_SETTING_HTTP,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
    }

  def SetEvents(self, events):
    """Used for unittest."""
    self._events = events

  def ResponseFromEvent(self, event):
    """Wraps a timeline event in a ChromeProxyResponse."""
    return ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    # Subclasses pick one of the AddResultsFor* helpers below.
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
    """Counts responses served via the proxy, from cache, and directly."""
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
    """Checks every response has a valid Via header or the proxy is bypassed.

    Raises:
      ChromeProxyMetricException: a response is missing a required Via
          header while the proxy is not bypassed.
    """
    via_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      else:
        bypassed, _ = self.IsProxyBypassed(tab)
        if tab and bypassed:
          logging.warning('Proxy bypassed for %s', resp.response.url)
          bypass_count += 1
        else:
          r = resp.response
          raise ChromeProxyMetricException(
              '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                  r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_bypassed', 'count', bypass_count))

  def AddResultsForClientVersion(self, tab, results):
    """Verifies every response is a 200 with a valid Via header."""
    for resp in self.IterResponses(tab):
      r = resp.response
      if resp.response.status != 200:
        raise ChromeProxyMetricException('%s: Response is not 200: %d' %
                                         (r.url, r.status))
      if not resp.IsValidByViaHeader():
        raise ChromeProxyMetricException('%s: Response missing via header' %
                                         (r.url))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'version_test', 'count', 1))

  def ProxyListForDev(self, proxies):
    """Returns |proxies| with the HTTPS proxy replaced by the dev proxy."""
    return [self.effective_proxies['proxy-dev']
            if proxy == self.effective_proxies['proxy']
            else proxy for proxy in proxies]

  def IsProxyBypassed(self, tab):
    """Gets whether all configured proxies are bypassed.

    Returns:
      A tuple of the form (boolean, string list). If all configured proxies
      are bypassed, then the return value will be (True, bypassed proxies).
      Otherwise, the return value will be (False, empty list).
    """
    if not tab:
      return False, []

    info = GetProxyInfoFromNetworkInternals(tab)
    if not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)

    # Compare the sorted bad-proxy list against both the regular and the
    # proxy-dev configurations.
    bad_proxies = [str(p['proxy']) for p in info['badProxies']]
    bad_proxies.sort()
    proxies = [self.effective_proxies['proxy'],
               self.effective_proxies['fallback']]
    proxies.sort()
    proxies_dev = self.ProxyListForDev(proxies)
    proxies_dev.sort()
    if bad_proxies == proxies:
      return True, proxies
    elif bad_proxies == proxies_dev:
      return True, proxies_dev
    return False, []

  def VerifyBadProxies(self, bad_proxies, expected_bad_proxies):
    """Verifies the bad proxy list and their retry times are expected.

    Args:
      bad_proxies: the list of actual bad proxies and their retry times.
      expected_bad_proxies: a list of dictionaries in the form:

          {'proxy': <proxy origin>,
           'retry_seconds_low': <minimum bypass duration in seconds>,
           'retry_seconds_high': <maximum bypass duration in seconds>}

          If an element in the list is missing either the 'retry_seconds_low'
          entry or the 'retry_seconds_high' entry, the default bypass minimum
          and maximum durations respectively will be used for that element.
    """
    if not bad_proxies:
      bad_proxies = []

    # Check that each of the proxy origins and retry times match.
    # NOTE: map(None, a, b) is the Python 2 zip-longest idiom; a length
    # mismatch pads with None and fails loudly in the subscripts below.
    for bad_proxy, expected_bad_proxy in map(None, bad_proxies,
                                             expected_bad_proxies):
      # Check if the proxy origins match, allowing for the proxy-dev origin in
      # the place of the HTTPS proxy origin.
      if (bad_proxy['proxy'] != expected_bad_proxy['proxy'] and
          bad_proxy['proxy'] != expected_bad_proxy['proxy'].replace(
              self.effective_proxies['proxy'],
              self.effective_proxies['proxy-dev'])):
        raise ChromeProxyMetricException(
            'Actual and expected bad proxies should match: %s vs. %s' % (
                str(bad_proxy), str(expected_bad_proxy)))

      # Check that the retry times match.
      retry_seconds_low = expected_bad_proxy.get('retry_seconds_low',
                                                 DEFAULT_BYPASS_MIN_SECONDS)
      retry_seconds_high = expected_bad_proxy.get('retry_seconds_high',
                                                  DEFAULT_BYPASS_MAX_SECONDS)
      retry_time_low = (datetime.datetime.now() +
                        datetime.timedelta(seconds=retry_seconds_low))
      retry_time_high = (datetime.datetime.now() +
                         datetime.timedelta(seconds=retry_seconds_high))
      # 'retry' is reported in milliseconds since the epoch.
      got_retry_time = datetime.datetime.fromtimestamp(
          int(bad_proxy['retry'])/1000)
      if not ProxyRetryTimeInRange(
          got_retry_time, retry_time_low, retry_time_high):
        raise ChromeProxyMetricException(
            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
                bad_proxy['proxy'], str(got_retry_time), str(retry_time_low),
                str(retry_time_high)))

  def VerifyAllProxiesBypassed(self, tab):
    """Asserts the proxy is enabled and every configured proxy is bad."""
    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab)
      if not is_bypassed:
        raise ChromeProxyMetricException(
            'Chrome proxy should be bypassed. proxy info: %s' % info)
      self.VerifyBadProxies(info['badProxies'],
                            [{'proxy': p} for p in expected_bad_proxies])

  def AddResultsForBypass(self, tab, results):
    """Verifies no response has a proxy Via header and all proxies are bad."""
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    self.VerifyAllProxiesBypassed(tab)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForFallback(self, tab, results):
    """Verifies exactly one bypassed response and the HTTPS proxy marked bad."""
    via_proxy_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        via_proxy_count += 1
      elif resp.ShouldHaveChromeProxyViaHeader():
        bypass_count += 1

    if bypass_count != 1:
      raise ChromeProxyMetricException(
          'Only the triggering response should have bypassed all proxies.')

    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    self.VerifyBadProxies(info['badProxies'],
                          [{'proxy': self.effective_proxies['proxy']}])

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via_proxy', 'count', via_proxy_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForCorsBypass(self, tab, results):
    """Verifies each CORS 502 is followed by exactly one direct 200 retry."""
    eligible_response_count = 0
    bypass_count = 0
    bypasses = {}
    for resp in self.IterResponses(tab):
      logging.warning('got a resource %s', resp.response.url)

    # First pass: count bypasses and remember which URLs got a 502 via proxy.
    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1
        elif resp.response.status == 502:
          bypasses[resp.response.url] = 0

    # Second pass: count the direct 200 retries for each 502'd URL.
    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        if not resp.HasChromeProxyViaHeader():
          if resp.response.status == 200:
            if resp.response.url in bypasses:
              bypasses[resp.response.url] = bypasses[resp.response.url] + 1

    for url in bypasses:
      if bypasses[url] == 0:
        raise ChromeProxyMetricException(
            '%s: Got a 502 without a subsequent 200' % (url))
      elif bypasses[url] > 1:
        raise ChromeProxyMetricException(
            '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'At least one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))

    self.VerifyAllProxiesBypassed(tab)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cors_bypass', 'count', bypass_count))

  def AddResultsForBlockOnce(self, tab, results):
    """Verifies exactly one eligible response bypassed, with no bad proxies."""
    eligible_response_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1

    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      # A block-once bypass must not leave any proxy on the bad-proxy list.
      self.VerifyBadProxies(info['badProxies'], [])

    if eligible_response_count <= 1:
      raise ChromeProxyMetricException(
          'There should be more than one DRP eligible response '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))
    elif bypass_count != 1:
      raise ChromeProxyMetricException(
          'Exactly one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))
    else:
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'eligible_responses', 'count',
          eligible_response_count))
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForSafebrowsing(self, tab, results):
    """Verifies every response is a safe-browsing block response."""
    count = 0
    safebrowsing_count = 0
    for resp in self.IterResponses(tab):
      count += 1
      if resp.IsSafebrowsingResponse():
        safebrowsing_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Not a valid safe browsing response.\n'
            'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
    if count == safebrowsing_count:
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'safebrowsing', 'boolean', True))
    else:
      raise ChromeProxyMetricException(
          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
              count, safebrowsing_count))

  def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
    """Asserts the effective and bad proxy lists match expectations.

    Both lists are also accepted in their proxy-dev form (HTTPS proxy
    replaced by the dev proxy).
    """
    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    proxies = info['proxies']
    if (proxies != expected_proxies and
        proxies != self.ProxyListForDev(expected_proxies)):
      raise ChromeProxyMetricException(
          'Wrong effective proxies (%s). Expect: "%s"' % (
              str(proxies), str(expected_proxies)))

    bad_proxies = []
    if 'badProxies' in info and info['badProxies']:
      bad_proxies = [p['proxy'] for p in info['badProxies']
                     if 'proxy' in p and p['proxy']]
    if (bad_proxies != expected_bad_proxies and
        bad_proxies != self.ProxyListForDev(expected_bad_proxies)):
      raise ChromeProxyMetricException(
          'Wrong bad proxies (%s). Expect: "%s"' % (
              str(bad_proxies), str(expected_bad_proxies)))

  def AddResultsForHTTPFallback(
      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
    """Verifies the fallback-to-HTTP proxy configuration took effect."""
    if not expected_proxies:
      expected_proxies = [self.effective_proxies['fallback'],
                          self.effective_proxies['direct']]
    if not expected_bad_proxies:
      expected_bad_proxies = []

    self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'http_fallback', 'boolean', True))

  def AddResultsForHTTPToDirectFallback(self, tab, results):
    """Verifies a fallback to direct connections when all proxies are bad."""
    self.VerifyAllProxiesBypassed(tab)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'direct_fallback', 'boolean', True))

  def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies):
    """Verifies results for an explicit bypass test.

    Args:
      tab: the tab for the test.
      results: the results object to add the results values to.
      expected_bad_proxies: A list of dictionary objects representing
          expected bad proxies and their expected retry time windows.
          See the definition of VerifyBadProxies for details.
    """
    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    self.VerifyBadProxies(info['badProxies'],
                          expected_bad_proxies)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'explicit_bypass', 'boolean', True))