# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import datetime
import logging
import os

from integration_tests import network_metrics
from telemetry.page import page_test
from telemetry.value import scalar


class ChromeProxyMetricException(page_test.MeasurementFailure):
  pass


CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60

def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
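  """Returns the Chrome proxy info scraped from chrome://net-internals#proxy.

  Navigates |tab| to |url|, injects chrome_proxy_metrics.js, and returns the
  result of window.__getChromeProxyInfo().
  """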
  tab.Navigate(url)
  with open(os.path.join(os.path.dirname(__file__),
                         'chrome_proxy_metrics.js')) as f:
    js = f.read()
  tab.ExecuteJavaScript(js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
  return info


def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
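  """Returns True if |retry_time| lies between |low| and |high|, allowing a
  grace of |grace_seconds| past the upper bound."""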
  return (retry_time >= low and
          (retry_time < high + datetime.timedelta(seconds=grace_seconds)))


class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
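    """Returns True if the response is expected to carry the proxy Via header.

    HTTPS and data: URLs, 304 Not Modified responses, cache hits, and
    responses without headers are excluded.
    """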
    resp = self.response
    # Ignore https and data urls.
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hits.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any headers. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not have any headers '
                      '(referer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
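    """Returns True if the response's Via header names the Chrome proxy."""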
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    vias = [v.strip(' ') for v in via_header.split(',')]
    # The Via header is valid if it is in the old format, or in the new
    # format with a 4-character version prefix, for example,
    # "1.1 Chrome-Compression-Proxy".
    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))

  def IsValidByViaHeader(self):
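    """Returns True unless the Via header is expected but missing."""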
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def IsSafebrowsingResponse(self):
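    """Returns True if the response is the proxy's Safe Browsing redirect.

    The proxy flags malware with a 307 redirect back to the same URL,
    carrying an 'X-Malware-Url: 1' header.
    """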
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
      return True
    return False


class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
    }

  def SetEvents(self, events):
    """Sets the timeline events directly; used by unit tests."""
    self._events = events

  def ResponseFromEvent(self, event):
    return ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
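    """Counts how many responses came via the proxy, from cache, or direct."""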
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
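    """Checks that each response has a valid Via header.

    Responses without one are tolerated only while all configured proxies
    are bypassed; otherwise this raises ChromeProxyMetricException.
    """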
    via_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      elif tab and self.IsProxyBypassed(tab):
        logging.warning('Proxy bypassed for %s', resp.response.url)
        bypass_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Via header (%s) is not valid (referer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_bypassed', 'count', bypass_count))

  def IsProxyBypassed(self, tab):
    """Returns True if all configured proxies are bypassed."""
    info = GetProxyInfoFromNetworkInternals(tab)
    if not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)

    # Note: list.sort() sorts in place and returns None, so use sorted() to
    # build comparable lists.
    bad_proxies = sorted(str(p['proxy']) for p in info['badProxies'])
    proxies = sorted([self.effective_proxies['proxy'],
                      self.effective_proxies['fallback']])
    return bad_proxies == proxies

  @staticmethod
  def VerifyBadProxies(
      badProxies, expected_proxies,
      retry_seconds_low=DEFAULT_BYPASS_MIN_SECONDS,
      retry_seconds_high=DEFAULT_BYPASS_MAX_SECONDS):
    """Verifies that the bad proxy list and its retry times are as expected."""
    if not badProxies or (len(badProxies) != len(expected_proxies)):
      return False

    # Check that all the expected proxies are listed as bad.
    proxies = [p['proxy'] for p in badProxies]
    expected_proxies.sort()
    proxies.sort()
    if expected_proxies != proxies:
      raise ChromeProxyMetricException(
          'Bad proxies: got %s want %s' % (
              str(badProxies), str(expected_proxies)))

    # Check that each retry time falls within the expected bypass window.
    for p in badProxies:
      retry_time_low = (datetime.datetime.now() +
                        datetime.timedelta(seconds=retry_seconds_low))
      retry_time_high = (datetime.datetime.now() +
                         datetime.timedelta(seconds=retry_seconds_high))
      got_retry_time = datetime.datetime.fromtimestamp(int(p['retry']) / 1000)
      if not ProxyRetryTimeInRange(
          got_retry_time, retry_time_low, retry_time_high):
        raise ChromeProxyMetricException(
            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
                p['proxy'], str(got_retry_time), str(retry_time_low),
                str(retry_time_high)))
    return True

  def AddResultsForBypass(self, tab, results):
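    """Checks that no response has the proxy Via header and that both the
    primary and fallback proxies are marked as bad.
    """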
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (referer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      self.VerifyBadProxies(
          info['badProxies'],
          [self.effective_proxies['proxy'],
           self.effective_proxies['fallback']])

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForSafebrowsing(self, tab, results):
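    """Checks that every response is a valid Safe Browsing response."""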
    count = 0
    safebrowsing_count = 0
    for resp in self.IterResponses(tab):
      count += 1
      if resp.IsSafebrowsingResponse():
        safebrowsing_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Not a valid safe browsing response.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
    if count == safebrowsing_count:
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'safebrowsing', 'boolean', True))
    else:
      raise ChromeProxyMetricException(
          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
              count, safebrowsing_count))

  def AddResultsForHTTPFallback(
      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
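    """Checks the effective and bad proxy lists after an HTTP fallback.

    By default, expects the fallback proxy followed by direct, with no bad
    proxies.
    """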
    info = GetProxyInfoFromNetworkInternals(tab)
    if 'enabled' not in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)

    if not expected_proxies:
      expected_proxies = [self.effective_proxies['fallback'],
                          self.effective_proxies['direct']]
    if not expected_bad_proxies:
      expected_bad_proxies = []

    proxies = info['proxies']
    if proxies != expected_proxies:
      raise ChromeProxyMetricException(
          'Wrong effective proxies (%s). Expect: "%s"' % (
              str(proxies), str(expected_proxies)))

    bad_proxies = []
    if 'badProxies' in info and info['badProxies']:
      bad_proxies = [p['proxy'] for p in info['badProxies']
                     if 'proxy' in p and p['proxy']]
    if bad_proxies != expected_bad_proxies:
      raise ChromeProxyMetricException(
          'Wrong bad proxies (%s). Expect: "%s"' % (
              str(bad_proxies), str(expected_bad_proxies)))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'http_fallback', 'boolean', True))