1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
import datetime
import logging
import os

from integration_tests import network_metrics
from telemetry.core import util
from telemetry.page import page_test
from telemetry.value import scalar
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a Chrome proxy metric expectation is not met."""
# Via header values added by the Chrome data compression proxy; the
# second form is the older, deprecated spelling.
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Known proxy origins for the primary (HTTPS), dev, fallback (HTTP) and
# direct (no proxy) configurations.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_DEV_SETTING_HTTP = 'proxy-xt.googlezip.net:80'
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Read the data reduction proxy state from the net-internals page.

  Args:
    tab: the telemetry tab used to load and query the page.
    url: the net-internals proxy page to navigate to.

  Returns:
    The proxy info dictionary produced by window.__getChromeProxyInfo().
  """
  # Without this navigation the |url| parameter was never used and the
  # injected script had no page to inspect.
  tab.Navigate(url)
  with open(os.path.join(os.path.dirname(__file__),
                         'chrome_proxy_metrics.js')) as f:
    # |js| was referenced below but never assigned; read the helper script.
    js = f.read()
    tab.ExecuteJavaScript(js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)

  # Sometimes, the proxy information on net_internals#proxy is slow to come up.
  # In order to prevent this from causing tests to flake frequently, wait for
  # up to 10 seconds for this information to appear.
  def IsDataReductionProxyEnabled():
    info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
    return info['enabled']

  util.WaitFor(IsDataReductionProxyEnabled, 10)
  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
  # The computed info was previously dropped; callers need it returned.
  return info
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Return True if retry_time lies in [low, high) widened by a grace period.

  Args:
    retry_time: the datetime to test.
    low: the expected lower bound (inclusive, minus grace).
    high: the expected upper bound (exclusive, plus grace).
    grace_seconds: slack applied to both ends of the window.
  """
  grace = datetime.timedelta(seconds=grace_seconds)
  return low - grace <= retry_time < high + grace
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    # All event parsing is delegated to the generic HTTPResponse wrapper.
    super(ChromeProxyResponse, self).__init__(event)
63 def ShouldHaveChromeProxyViaHeader(self
):
65 # Ignore https and data url
66 if resp
.url
.startswith('https') or resp
.url
.startswith('data:'):
68 # Ignore 304 Not Modified and cache hit.
69 if resp
.status
== 304 or resp
.served_from_cache
:
71 # Ignore invalid responses that don't have any header. Log a warning.
73 logging
.warning('response for %s does not any have header '
74 '(refer=%s, status=%s)',
75 resp
.url
, resp
.GetHeader('Referer'), resp
.status
)
79 def HasChromeProxyViaHeader(self
):
80 via_header
= self
.response
.GetHeader('Via')
83 vias
= [v
.strip(' ') for v
in via_header
.split(',')]
84 # The Via header is valid if it is the old format or the new format
85 # with 4-character version prefix, for example,
86 # "1.1 Chrome-Compression-Proxy".
87 return (CHROME_PROXY_VIA_HEADER_DEPRECATED
in vias
or
88 any(v
[4:] == CHROME_PROXY_VIA_HEADER
for v
in vias
))
90 def IsValidByViaHeader(self
):
91 return (not self
.ShouldHaveChromeProxyViaHeader() or
92 self
.HasChromeProxyViaHeader())
94 def IsSafebrowsingResponse(self
):
95 if (self
.response
.status
== 307 and
96 self
.response
.GetHeader('X-Malware-Url') == '1' and
97 self
.IsValidByViaHeader() and
98 self
.response
.GetHeader('Location') == self
.response
.url
):
102 def GetChromeProxyClientType(self
):
103 """Get the client type directive from the Chrome-Proxy request header.
106 The client type directive from the Chrome-Proxy request header for the
107 request that lead to this response. For example, if the request header
108 "Chrome-Proxy: c=android" is present, then this method would return
109 "android". Returns None if no client type directive is present.
111 if 'Chrome-Proxy' not in self
.response
.request_headers
:
114 chrome_proxy_request_header
= self
.response
.request_headers
['Chrome-Proxy']
115 values
= [v
.strip() for v
in chrome_proxy_request_header
.split(',')]
117 kvp
= value
.split('=', 1)
118 if len(kvp
) == 2 and kvp
[0].strip() == 'c':
119 return kvp
[1].strip()
class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  # The constructor header was missing; the super() call and attribute
  # assignments below belong to __init__.
  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    self.compute_data_saving = True
    # Maps a role name to the proxy origin expected to fill that role.
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "proxy-dev": PROXY_DEV_SETTING_HTTP,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
        }
136 def SetEvents(self
, events
):
137 """Used for unittest."""
138 self
._events
= events
140 def ResponseFromEvent(self
, event
):
141 return ChromeProxyResponse(event
)
143 def AddResults(self
, tab
, results
):
144 raise NotImplementedError
146 def AddResultsForDataSaving(self
, tab
, results
):
147 resources_via_proxy
= 0
148 resources_from_cache
= 0
151 super(ChromeProxyMetric
, self
).AddResults(tab
, results
)
152 for resp
in self
.IterResponses(tab
):
153 if resp
.response
.served_from_cache
:
154 resources_from_cache
+= 1
155 if resp
.HasChromeProxyViaHeader():
156 resources_via_proxy
+= 1
158 resources_direct
+= 1
160 results
.AddValue(scalar
.ScalarValue(
161 results
.current_page
, 'resources_via_proxy', 'count',
162 resources_via_proxy
))
163 results
.AddValue(scalar
.ScalarValue(
164 results
.current_page
, 'resources_from_cache', 'count',
165 resources_from_cache
))
166 results
.AddValue(scalar
.ScalarValue(
167 results
.current_page
, 'resources_direct', 'count', resources_direct
))
169 def AddResultsForHeaderValidation(self
, tab
, results
):
172 for resp
in self
.IterResponses(tab
):
173 if resp
.IsValidByViaHeader():
176 bypassed
, _
= self
.IsProxyBypassed(tab
)
178 logging
.warning('Proxy bypassed for %s', resp
.response
.url
)
182 raise ChromeProxyMetricException
, (
183 '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
184 r
.url
, r
.GetHeader('Via'), r
.GetHeader('Referer'), r
.status
))
185 results
.AddValue(scalar
.ScalarValue(
186 results
.current_page
, 'checked_via_header', 'count', via_count
))
187 results
.AddValue(scalar
.ScalarValue(
188 results
.current_page
, 'request_bypassed', 'count', bypass_count
))
190 def AddResultsForClientVersion(self
, tab
, results
):
191 for resp
in self
.IterResponses(tab
):
193 if resp
.response
.status
!= 200:
194 raise ChromeProxyMetricException
, ('%s: Response is not 200: %d' %
196 if not resp
.IsValidByViaHeader():
197 raise ChromeProxyMetricException
, ('%s: Response missing via header' %
199 results
.AddValue(scalar
.ScalarValue(
200 results
.current_page
, 'version_test', 'count', 1))
202 def GetClientTypeFromRequests(self
, tab
):
203 """Get the Chrome-Proxy client type value from requests made in this tab.
206 The client type value from the first request made in this tab that
207 specifies a client type in the Chrome-Proxy request header. See
208 ChromeProxyResponse.GetChromeProxyClientType for more details about the
209 Chrome-Proxy client type. Returns None if none of the requests made in
210 this tab specify a client type.
212 for resp
in self
.IterResponses(tab
):
213 client_type
= resp
.GetChromeProxyClientType()
218 def AddResultsForClientType(self
, tab
, results
, client_type
,
219 bypass_for_client_type
):
223 for resp
in self
.IterResponses(tab
):
224 if resp
.HasChromeProxyViaHeader():
226 if client_type
.lower() == bypass_for_client_type
.lower():
227 raise ChromeProxyMetricException
, (
228 '%s: Response for client of type "%s" has via header, but should '
230 resp
.response
.url
, bypass_for_client_type
, client_type
))
231 elif resp
.ShouldHaveChromeProxyViaHeader():
233 if client_type
.lower() != bypass_for_client_type
.lower():
234 raise ChromeProxyMetricException
, (
235 '%s: Response missing via header. Only "%s" clients should '
236 'bypass for this page, but this client is "%s".' % (
237 resp
.response
.url
, bypass_for_client_type
, client_type
))
239 results
.AddValue(scalar
.ScalarValue(
240 results
.current_page
, 'via', 'count', via_count
))
241 results
.AddValue(scalar
.ScalarValue(
242 results
.current_page
, 'bypass', 'count', bypass_count
))
244 def ProxyListForDev(self
, proxies
):
245 return [self
.effective_proxies
['proxy-dev']
246 if proxy
== self
.effective_proxies
['proxy']
247 else proxy
for proxy
in proxies
]
249 def IsProxyBypassed(self
, tab
):
250 """Get whether all configured proxies are bypassed.
253 A tuple of the form (boolean, string list). If all configured proxies
254 are bypassed, then the return value will be (True, bypassed proxies).
255 Otherwise, the return value will be (False, empty list).
260 info
= GetProxyInfoFromNetworkInternals(tab
)
261 if not info
['enabled']:
262 raise ChromeProxyMetricException
, (
263 'Chrome proxy should be enabled. proxy info: %s' % info
)
265 if not info
['badProxies']:
268 bad_proxies
= [str(p
['proxy']) for p
in info
['badProxies']]
270 proxies
= [self
.effective_proxies
['proxy'],
271 self
.effective_proxies
['fallback']]
273 proxies_dev
= self
.ProxyListForDev(proxies
)
275 if bad_proxies
== proxies
:
277 elif bad_proxies
== proxies_dev
:
278 return True, proxies_dev
281 def VerifyBadProxies(self
, bad_proxies
, expected_bad_proxies
):
282 """Verify the bad proxy list and their retry times are expected.
285 bad_proxies: the list of actual bad proxies and their retry times.
286 expected_bad_proxies: a list of dictionaries in the form:
288 {'proxy': <proxy origin>,
289 'retry_seconds_low': <minimum bypass duration in seconds>,
290 'retry_seconds_high': <maximum bypass duration in seconds>}
292 If an element in the list is missing either the 'retry_seconds_low'
293 entry or the 'retry_seconds_high' entry, the default bypass minimum
294 and maximum durations respectively will be used for that element.
299 # Check that each of the proxy origins and retry times match.
300 for bad_proxy
, expected_bad_proxy
in map(None, bad_proxies
,
301 expected_bad_proxies
):
302 # Check if the proxy origins match, allowing for the proxy-dev origin in
303 # the place of the HTTPS proxy origin.
304 if (bad_proxy
['proxy'] != expected_bad_proxy
['proxy'] and
305 bad_proxy
['proxy'] != expected_bad_proxy
['proxy'].replace(
306 self
.effective_proxies
['proxy'],
307 self
.effective_proxies
['proxy-dev'])):
308 raise ChromeProxyMetricException
, (
309 'Actual and expected bad proxies should match: %s vs. %s' % (
310 str(bad_proxy
), str(expected_bad_proxy
)))
312 # Check that the retry times match.
313 retry_seconds_low
= expected_bad_proxy
.get('retry_seconds_low',
314 DEFAULT_BYPASS_MIN_SECONDS
)
315 retry_seconds_high
= expected_bad_proxy
.get('retry_seconds_high',
316 DEFAULT_BYPASS_MAX_SECONDS
)
317 retry_time_low
= (datetime
.datetime
.now() +
318 datetime
.timedelta(seconds
=retry_seconds_low
))
319 retry_time_high
= (datetime
.datetime
.now() +
320 datetime
.timedelta(seconds
=retry_seconds_high
))
321 got_retry_time
= datetime
.datetime
.fromtimestamp(
322 int(bad_proxy
['retry'])/1000)
323 if not ProxyRetryTimeInRange(
324 got_retry_time
, retry_time_low
, retry_time_high
):
325 raise ChromeProxyMetricException
, (
326 'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
327 bad_proxy
['proxy'], str(got_retry_time
), str(retry_time_low
),
328 str(retry_time_high
)))
330 def VerifyAllProxiesBypassed(self
, tab
):
331 """Verify that all proxies are bypassed for 1 to 5 minutes."""
333 info
= GetProxyInfoFromNetworkInternals(tab
)
334 if not info
['enabled']:
335 raise ChromeProxyMetricException
, (
336 'Chrome proxy should be enabled. proxy info: %s' % info
)
337 is_bypassed
, expected_bad_proxies
= self
.IsProxyBypassed(tab
)
339 raise ChromeProxyMetricException
, (
340 'Chrome proxy should be bypassed. proxy info: %s' % info
)
341 self
.VerifyBadProxies(info
['badProxies'],
342 [{'proxy': p
} for p
in expected_bad_proxies
])
344 def AddResultsForBypass(self
, tab
, results
):
346 for resp
in self
.IterResponses(tab
):
347 if resp
.HasChromeProxyViaHeader():
349 raise ChromeProxyMetricException
, (
350 '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
351 r
.url
, r
.GetHeader('Via'), r
.GetHeader('Referer'), r
.status
))
354 self
.VerifyAllProxiesBypassed(tab
)
355 results
.AddValue(scalar
.ScalarValue(
356 results
.current_page
, 'bypass', 'count', bypass_count
))
358 def AddResultsForFallback(self
, tab
, results
):
361 for resp
in self
.IterResponses(tab
):
362 if resp
.HasChromeProxyViaHeader():
364 elif resp
.ShouldHaveChromeProxyViaHeader():
367 if bypass_count
!= 1:
368 raise ChromeProxyMetricException
, (
369 'Only the triggering response should have bypassed all proxies.')
371 info
= GetProxyInfoFromNetworkInternals(tab
)
372 if not 'enabled' in info
or not info
['enabled']:
373 raise ChromeProxyMetricException
, (
374 'Chrome proxy should be enabled. proxy info: %s' % info
)
375 self
.VerifyBadProxies(info
['badProxies'],
376 [{'proxy': self
.effective_proxies
['proxy']}])
378 results
.AddValue(scalar
.ScalarValue(
379 results
.current_page
, 'via_proxy', 'count', via_proxy_count
))
380 results
.AddValue(scalar
.ScalarValue(
381 results
.current_page
, 'bypass', 'count', bypass_count
))
383 def AddResultsForCorsBypass(self
, tab
, results
):
384 eligible_response_count
= 0
387 for resp
in self
.IterResponses(tab
):
388 logging
.warn('got a resource %s' % (resp
.response
.url
))
390 for resp
in self
.IterResponses(tab
):
391 if resp
.ShouldHaveChromeProxyViaHeader():
392 eligible_response_count
+= 1
393 if not resp
.HasChromeProxyViaHeader():
395 elif resp
.response
.status
== 502:
396 bypasses
[resp
.response
.url
] = 0
398 for resp
in self
.IterResponses(tab
):
399 if resp
.ShouldHaveChromeProxyViaHeader():
400 if not resp
.HasChromeProxyViaHeader():
401 if resp
.response
.status
== 200:
402 if (bypasses
.has_key(resp
.response
.url
)):
403 bypasses
[resp
.response
.url
] = bypasses
[resp
.response
.url
] + 1
406 if bypasses
[url
] == 0:
407 raise ChromeProxyMetricException
, (
408 '%s: Got a 502 without a subsequent 200' % (url
))
409 elif bypasses
[url
] > 1:
410 raise ChromeProxyMetricException
, (
411 '%s: Got a 502 and multiple 200s: %d' % (url
, bypasses
[url
]))
412 if bypass_count
== 0:
413 raise ChromeProxyMetricException
, (
414 'At least one response should be bypassed. '
415 '(eligible_response_count=%d, bypass_count=%d)\n' % (
416 eligible_response_count
, bypass_count
))
418 results
.AddValue(scalar
.ScalarValue(
419 results
.current_page
, 'cors_bypass', 'count', bypass_count
))
421 def AddResultsForBlockOnce(self
, tab
, results
):
422 eligible_response_count
= 0
424 for resp
in self
.IterResponses(tab
):
425 if resp
.ShouldHaveChromeProxyViaHeader():
426 eligible_response_count
+= 1
427 if not resp
.HasChromeProxyViaHeader():
431 info
= GetProxyInfoFromNetworkInternals(tab
)
432 if not info
['enabled']:
433 raise ChromeProxyMetricException
, (
434 'Chrome proxy should be enabled. proxy info: %s' % info
)
435 self
.VerifyBadProxies(info
['badProxies'], [])
437 if eligible_response_count
<= 1:
438 raise ChromeProxyMetricException
, (
439 'There should be more than one DRP eligible response '
440 '(eligible_response_count=%d, bypass_count=%d)\n' % (
441 eligible_response_count
, bypass_count
))
442 elif bypass_count
!= 1:
443 raise ChromeProxyMetricException
, (
444 'Exactly one response should be bypassed. '
445 '(eligible_response_count=%d, bypass_count=%d)\n' % (
446 eligible_response_count
, bypass_count
))
448 results
.AddValue(scalar
.ScalarValue(
449 results
.current_page
, 'eligible_responses', 'count',
450 eligible_response_count
))
451 results
.AddValue(scalar
.ScalarValue(
452 results
.current_page
, 'bypass', 'count', bypass_count
))
454 def AddResultsForSafebrowsing(self
, tab
, results
):
456 safebrowsing_count
= 0
457 for resp
in self
.IterResponses(tab
):
459 if resp
.IsSafebrowsingResponse():
460 safebrowsing_count
+= 1
463 raise ChromeProxyMetricException
, (
464 '%s: Not a valid safe browsing response.\n'
465 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
466 r
.url
, r
.status
, r
.status_text
, r
.headers
))
467 if count
== safebrowsing_count
:
468 results
.AddValue(scalar
.ScalarValue(
469 results
.current_page
, 'safebrowsing', 'boolean', True))
471 raise ChromeProxyMetricException
, (
472 'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
473 count
, safebrowsing_count
))
475 def VerifyProxyInfo(self
, tab
, expected_proxies
, expected_bad_proxies
):
476 info
= GetProxyInfoFromNetworkInternals(tab
)
477 if not 'enabled' in info
or not info
['enabled']:
478 raise ChromeProxyMetricException
, (
479 'Chrome proxy should be enabled. proxy info: %s' % info
)
480 proxies
= info
['proxies']
481 if (set(proxies
) != set(expected_proxies
) and
482 set(proxies
) != set(self
.ProxyListForDev(expected_proxies
))):
483 raise ChromeProxyMetricException
, (
484 'Wrong effective proxies (%s). Expect: "%s"' % (
485 str(proxies
), str(expected_proxies
)))
488 if 'badProxies' in info
and info
['badProxies']:
489 bad_proxies
= [p
['proxy'] for p
in info
['badProxies']
490 if 'proxy' in p
and p
['proxy']]
491 if (set(bad_proxies
) != set(expected_bad_proxies
) and
492 set(bad_proxies
) != set(self
.ProxyListForDev(expected_bad_proxies
))):
493 raise ChromeProxyMetricException
, (
494 'Wrong bad proxies (%s). Expect: "%s"' % (
495 str(bad_proxies
), str(expected_bad_proxies
)))
497 def AddResultsForHTTPFallback(
498 self
, tab
, results
, expected_proxies
=None, expected_bad_proxies
=None):
499 if not expected_proxies
:
500 expected_proxies
= [self
.effective_proxies
['fallback'],
501 self
.effective_proxies
['direct']]
502 if not expected_bad_proxies
:
503 expected_bad_proxies
= []
505 self
.VerifyProxyInfo(tab
, expected_proxies
, expected_bad_proxies
)
506 results
.AddValue(scalar
.ScalarValue(
507 results
.current_page
, 'http_fallback', 'boolean', True))
509 def AddResultsForHTTPToDirectFallback(self
, tab
, results
):
510 self
.VerifyAllProxiesBypassed(tab
)
511 results
.AddValue(scalar
.ScalarValue(
512 results
.current_page
, 'direct_fallback', 'boolean', True))
514 def AddResultsForExplicitBypass(self
, tab
, results
, expected_bad_proxies
):
515 """Verify results for an explicit bypass test.
518 tab: the tab for the test.
519 results: the results object to add the results values to.
520 expected_bad_proxies: A list of dictionary objects representing
521 expected bad proxies and their expected retry time windows.
522 See the definition of VerifyBadProxies for details.
524 info
= GetProxyInfoFromNetworkInternals(tab
)
525 if not 'enabled' in info
or not info
['enabled']:
526 raise ChromeProxyMetricException
, (
527 'Chrome proxy should be enabled. proxy info: %s' % info
)
528 self
.VerifyBadProxies(info
['badProxies'],
529 expected_bad_proxies
)
530 results
.AddValue(scalar
.ScalarValue(
531 results
.current_page
, 'explicit_bypass', 'boolean', True))