1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
import datetime
import logging
import os

from integration_tests import network_metrics
from telemetry.core import util
from telemetry.page import page_test
from telemetry.value import scalar
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a Chrome proxy check fails; reported by the telemetry
  harness as a measurement failure rather than a harness error."""
  pass
# Via-header values that identify a response as having gone through the
# Chrome data compression proxy (new and deprecated formats).
CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
CHROME_PROXY_VIA_HEADER_DEPRECATED = '1.1 Chrome Compression Proxy'

# Proxy endpoints, in the forms reported by chrome://net-internals#proxy.
PROXY_SETTING_HTTPS = 'proxy.googlezip.net:443'
PROXY_SETTING_HTTPS_WITH_SCHEME = 'https://' + PROXY_SETTING_HTTPS
PROXY_DEV_SETTING_HTTP = 'proxy-xt.googlezip.net:80'
PROXY_SETTING_HTTP = 'compress.googlezip.net:80'
PROXY_SETTING_DIRECT = 'direct://'

# The default Chrome Proxy bypass time is a range from one to five minutes.
# See ProxyList::UpdateRetryInfoOnFallback in net/proxy/proxy_list.cc.
DEFAULT_BYPASS_MIN_SECONDS = 60
DEFAULT_BYPASS_MAX_SECONDS = 5 * 60
def GetProxyInfoFromNetworkInternals(tab, url='chrome://net-internals#proxy'):
  """Fetch the Chrome proxy state from the net-internals page.

  Navigates the tab to chrome://net-internals#proxy, injects the helper
  script chrome_proxy_metrics.js, and reads back the proxy info object.

  Args:
    tab: the telemetry tab to drive.
    url: the net-internals page to navigate to.

  Returns:
    The dict produced by window.__getChromeProxyInfo() (includes at least
    the keys 'enabled', 'proxies' and 'badProxies').
  """
  tab.Navigate(url)
  with open(os.path.join(os.path.dirname(__file__),
                         'chrome_proxy_metrics.js')) as f:
    js = f.read()
    tab.ExecuteJavaScript(js)
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)

  # Sometimes, the proxy information on net_internals#proxy is slow to come up.
  # In order to prevent this from causing tests to flake frequently, wait for
  # up to 10 seconds for this information to appear.
  def IsDataReductionProxyEnabled():
    info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
    return info['enabled']

  util.WaitFor(IsDataReductionProxyEnabled, 10)
  info = tab.EvaluateJavaScript('window.__getChromeProxyInfo()')
  return info
def ProxyRetryTimeInRange(retry_time, low, high, grace_seconds=30):
  """Return True if retry_time lies in [low, high) widened by a grace period.

  The window [low, high) is extended by grace_seconds on both ends to absorb
  clock skew between when the bypass started and when it was observed.
  """
  grace = datetime.timedelta(seconds=grace_seconds)
  return low - grace <= retry_time < high + grace
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """Represents an HTTP response from a timeline event."""

  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
    """Return True if this response is expected to carry the proxy Via header."""
    resp = self.response
    # Ignore https and data url
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hit.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any header. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not have any headers '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasChromeProxyViaHeader(self):
    """Return True if the Via header identifies the Chrome compression proxy."""
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    vias = [v.strip(' ') for v in via_header.split(',')]
    # The Via header is valid if it is the old format or the new format
    # with 4-character version prefix, for example,
    # "1.1 Chrome-Compression-Proxy".
    return (CHROME_PROXY_VIA_HEADER_DEPRECATED in vias or
            any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias))

  def IsValidByViaHeader(self):
    """A response is valid if it either needs no Via header or has one."""
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def IsSafebrowsingResponse(self):
    """Return True if this looks like the proxy's safe-browsing redirect:
    a 307 back to the same URL with the X-Malware-Url marker header."""
    if (self.response.status == 307 and
        self.response.GetHeader('X-Malware-Url') == '1' and
        self.IsValidByViaHeader() and
        self.response.GetHeader('Location') == self.response.url):
      return True
    return False

  def GetChromeProxyClientType(self):
    """Get the client type directive from the Chrome-Proxy request header.

    Returns:
        The client type directive from the Chrome-Proxy request header for the
        request that lead to this response. For example, if the request header
        "Chrome-Proxy: c=android" is present, then this method would return
        "android". Returns None if no client type directive is present.
    """
    if 'Chrome-Proxy' not in self.response.request_headers:
      return None

    chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
    values = [v.strip() for v in chrome_proxy_request_header.split(',')]
    for value in values:
      kvp = value.split('=', 1)
      if len(kvp) == 2 and kvp[0].strip() == 'c':
        return kvp[1].strip()
    return None
class ChromeProxyMetric(network_metrics.NetworkMetric):
  """A Chrome proxy timeline metric."""

  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    # Ask the base NetworkMetric to also compute data-saving numbers.
    self.compute_data_saving = True
    self.effective_proxies = {
        "proxy": PROXY_SETTING_HTTPS_WITH_SCHEME,
        "proxy-dev": PROXY_DEV_SETTING_HTTP,
        "fallback": PROXY_SETTING_HTTP,
        "direct": PROXY_SETTING_DIRECT,
        }

  def SetEvents(self, events):
    """Used for unittest."""
    self._events = events

  def ResponseFromEvent(self, event):
    """Wrap a timeline event in a ChromeProxyResponse."""
    return ChromeProxyResponse(event)

  def AddResults(self, tab, results):
    # Subclasses/callers pick one of the AddResultsFor* methods below.
    raise NotImplementedError

  def AddResultsForDataSaving(self, tab, results):
    """Count proxied, cached, and direct resources and add them to results."""
    resources_via_proxy = 0
    resources_from_cache = 0
    resources_direct = 0

    super(ChromeProxyMetric, self).AddResults(tab, results)
    for resp in self.IterResponses(tab):
      if resp.response.served_from_cache:
        resources_from_cache += 1
      if resp.HasChromeProxyViaHeader():
        resources_via_proxy += 1
      else:
        resources_direct += 1

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_via_proxy', 'count',
        resources_via_proxy))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_from_cache', 'count',
        resources_from_cache))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'resources_direct', 'count', resources_direct))

  def AddResultsForHeaderValidation(self, tab, results):
    """Check that every response has a valid Via header, tolerating responses
    seen while the proxy is legitimately bypassed."""
    via_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.IsValidByViaHeader():
        via_count += 1
      else:
        bypassed, _ = self.IsProxyBypassed(tab)
        if bypassed:
          logging.warning('Proxy bypassed for %s', resp.response.url)
          bypass_count += 1
        else:
          r = resp.response
          raise ChromeProxyMetricException(
              '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
                  r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'checked_via_header', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'request_bypassed', 'count', bypass_count))

  def AddResultsForClientVersion(self, tab, results):
    """Verify every response is a proxied 200."""
    for resp in self.IterResponses(tab):
      r = resp.response
      if r.status != 200:
        raise ChromeProxyMetricException('%s: Response is not 200: %d' %
                                         (r.url, r.status))
      if not resp.IsValidByViaHeader():
        raise ChromeProxyMetricException('%s: Response missing via header' %
                                         (r.url))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'version_test', 'count', 1))

  def GetClientTypeFromRequests(self, tab):
    """Get the Chrome-Proxy client type value from requests made in this tab.

    Returns:
        The client type value from the first request made in this tab that
        specifies a client type in the Chrome-Proxy request header. See
        ChromeProxyResponse.GetChromeProxyClientType for more details about the
        Chrome-Proxy client type. Returns None if none of the requests made in
        this tab specify a client type.
    """
    for resp in self.IterResponses(tab):
      client_type = resp.GetChromeProxyClientType()
      if client_type:
        return client_type
    return None

  def AddResultsForClientType(self, tab, results, client_type,
                              bypass_for_client_type):
    """Verify that only clients of bypass_for_client_type are bypassed."""
    via_count = 0
    bypass_count = 0

    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        via_count += 1
        if client_type.lower() == bypass_for_client_type.lower():
          # NOTE(review): middle line of this message reconstructed — verify.
          raise ChromeProxyMetricException(
              '%s: Response for client of type "%s" has via header, but should '
              'be bypassed. (client type is "%s")' % (
                  resp.response.url, bypass_for_client_type, client_type))
      elif resp.ShouldHaveChromeProxyViaHeader():
        bypass_count += 1
        if client_type.lower() != bypass_for_client_type.lower():
          raise ChromeProxyMetricException(
              '%s: Response missing via header. Only "%s" clients should '
              'bypass for this page, but this client is "%s".' % (
                  resp.response.url, bypass_for_client_type, client_type))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'via', 'count', via_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def ProxyListForDev(self, proxies):
    """Return proxies with the HTTPS proxy replaced by the dev proxy."""
    return [self.effective_proxies['proxy-dev']
            if proxy == self.effective_proxies['proxy']
            else proxy for proxy in proxies]

  def IsProxyBypassed(self, tab):
    """Get whether all configured proxies are bypassed.

    Returns:
        A tuple of the form (boolean, string list). If all configured proxies
        are bypassed, then the return value will be (True, bypassed proxies).
        Otherwise, the return value will be (False, empty list).
    """
    if not tab:
      return False, []

    info = GetProxyInfoFromNetworkInternals(tab)
    if not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    if not info['badProxies']:
      return False, []

    bad_proxies = [str(p['proxy']) for p in info['badProxies']]
    # Expect all but the "direct://" proxy to be bad.
    expected_bad_proxies = info['proxies'][:-1]
    if set(bad_proxies) == set(expected_bad_proxies):
      return True, expected_bad_proxies
    return False, []

  def VerifyBadProxies(self, bad_proxies, expected_bad_proxies):
    """Verify the bad proxy list and their retry times are expected.

    Args:
        bad_proxies: the list of actual bad proxies and their retry times.
        expected_bad_proxies: a list of dictionaries in the form:

            {'proxy': <proxy origin>,
             'retry_seconds_low': <minimum bypass duration in seconds>,
             'retry_seconds_high': <maximum bypass duration in seconds>}

            If an element in the list is missing either the 'retry_seconds_low'
            entry or the 'retry_seconds_high' entry, the default bypass minimum
            and maximum durations respectively will be used for that element.
    """
    if not bad_proxies:
      bad_proxies = []

    if len(bad_proxies) != len(expected_bad_proxies):
      raise ChromeProxyMetricException(
          'Actual and expected bad proxy lists should match: %s vs. %s' % (
              str(bad_proxies), str(expected_bad_proxies)))

    # Check that each of the proxy origins and retry times match.
    for expected_bad_proxy in expected_bad_proxies:
      # Find a matching actual bad proxy origin, allowing for the proxy-dev
      # origin in the place of the HTTPS proxy origin.
      bad_proxy = None
      for actual_proxy in bad_proxies:
        if (expected_bad_proxy['proxy'] == actual_proxy['proxy'] or (
            self.effective_proxies['proxy-dev'] == actual_proxy['proxy'] and
            self.effective_proxies['proxy'] == expected_bad_proxy['proxy'])):
          bad_proxy = actual_proxy
          break
      if not bad_proxy:
        raise ChromeProxyMetricException(
            'No match for expected bad proxy %s - actual and expected bad '
            'proxies should match: %s vs. %s' % (expected_bad_proxy['proxy'],
                                                 str(bad_proxies),
                                                 str(expected_bad_proxies)))

      # Check that the retry times match.
      retry_seconds_low = expected_bad_proxy.get('retry_seconds_low',
                                                 DEFAULT_BYPASS_MIN_SECONDS)
      retry_seconds_high = expected_bad_proxy.get('retry_seconds_high',
                                                  DEFAULT_BYPASS_MAX_SECONDS)
      retry_time_low = (datetime.datetime.now() +
                        datetime.timedelta(seconds=retry_seconds_low))
      retry_time_high = (datetime.datetime.now() +
                         datetime.timedelta(seconds=retry_seconds_high))
      # 'retry' is in milliseconds since the epoch; use floor division so the
      # result is the same under Python 2 and Python 3.
      got_retry_time = datetime.datetime.fromtimestamp(
          int(bad_proxy['retry']) // 1000)
      if not ProxyRetryTimeInRange(
          got_retry_time, retry_time_low, retry_time_high):
        raise ChromeProxyMetricException(
            'Bad proxy %s retry time (%s) should be within range (%s-%s).' % (
                bad_proxy['proxy'], str(got_retry_time), str(retry_time_low),
                str(retry_time_high)))

  def VerifyAllProxiesBypassed(self, tab):
    """Verify that all proxies are bypassed for 1 to 5 minutes."""
    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      is_bypassed, expected_bad_proxies = self.IsProxyBypassed(tab)
      if not is_bypassed:
        raise ChromeProxyMetricException(
            'Chrome proxy should be bypassed. proxy info: %s' % info)
      self.VerifyBadProxies(info['badProxies'],
                            [{'proxy': p} for p in expected_bad_proxies])

  def AddResultsForBypass(self, tab, results):
    """Verify that no response went through the proxy and all proxies are bad."""
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.HasChromeProxyViaHeader():
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
                r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
      bypass_count += 1

    self.VerifyAllProxiesBypassed(tab)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForCorsBypass(self, tab, results):
    """Verify that each 502 CORS response is followed by exactly one direct 200."""
    eligible_response_count = 0
    bypass_count = 0
    bypasses = {}
    for resp in self.IterResponses(tab):
      logging.warning('got a resource %s', resp.response.url)

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1
        elif resp.response.status == 502:
          bypasses[resp.response.url] = 0

    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        if not resp.HasChromeProxyViaHeader():
          if resp.response.status == 200:
            if resp.response.url in bypasses:
              bypasses[resp.response.url] = bypasses[resp.response.url] + 1

    for url in bypasses:
      if bypasses[url] == 0:
        raise ChromeProxyMetricException(
            '%s: Got a 502 without a subsequent 200' % (url))
      elif bypasses[url] > 1:
        raise ChromeProxyMetricException(
            '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
    if bypass_count == 0:
      raise ChromeProxyMetricException(
          'At least one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'cors_bypass', 'count', bypass_count))

  def AddResultsForBlockOnce(self, tab, results):
    """Verify exactly one eligible response was bypassed (block-once)."""
    eligible_response_count = 0
    bypass_count = 0
    for resp in self.IterResponses(tab):
      if resp.ShouldHaveChromeProxyViaHeader():
        eligible_response_count += 1
        if not resp.HasChromeProxyViaHeader():
          bypass_count += 1

    if tab:
      info = GetProxyInfoFromNetworkInternals(tab)
      if not info['enabled']:
        raise ChromeProxyMetricException(
            'Chrome proxy should be enabled. proxy info: %s' % info)
      # Block-once must not mark any proxy as bad.
      self.VerifyBadProxies(info['badProxies'], [])

    if eligible_response_count <= 1:
      raise ChromeProxyMetricException(
          'There should be more than one DRP eligible response '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))
    elif bypass_count != 1:
      raise ChromeProxyMetricException(
          'Exactly one response should be bypassed. '
          '(eligible_response_count=%d, bypass_count=%d)\n' % (
              eligible_response_count, bypass_count))

    results.AddValue(scalar.ScalarValue(
        results.current_page, 'eligible_responses', 'count',
        eligible_response_count))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'bypass', 'count', bypass_count))

  def AddResultsForSafebrowsing(self, tab, results):
    """Verify every response is the proxy's safe-browsing redirect."""
    count = 0
    safebrowsing_count = 0
    for resp in self.IterResponses(tab):
      count += 1
      if resp.IsSafebrowsingResponse():
        safebrowsing_count += 1
      else:
        r = resp.response
        raise ChromeProxyMetricException(
            '%s: Not a valid safe browsing response.\n'
            'Response: status=(%d, %s)\nHeaders:\n %s' % (
                r.url, r.status, r.status_text, r.headers))
    if count == safebrowsing_count:
      results.AddValue(scalar.ScalarValue(
          results.current_page, 'safebrowsing', 'boolean', True))
    else:
      raise ChromeProxyMetricException(
          'Safebrowsing failed (count=%d, safebrowsing_count=%d)\n' % (
              count, safebrowsing_count))

  def VerifyProxyInfo(self, tab, expected_proxies, expected_bad_proxies):
    """Check the effective and bad proxy lists against expectations, allowing
    the dev-proxy substitution for the HTTPS proxy."""
    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    proxies = info['proxies']
    if (set(proxies) != set(expected_proxies) and
        set(proxies) != set(self.ProxyListForDev(expected_proxies))):
      raise ChromeProxyMetricException(
          'Wrong effective proxies (%s). Expect: "%s"' % (
              str(proxies), str(expected_proxies)))

    bad_proxies = []
    if 'badProxies' in info and info['badProxies']:
      bad_proxies = [p['proxy'] for p in info['badProxies']
                     if 'proxy' in p and p['proxy']]
    if (set(bad_proxies) != set(expected_bad_proxies) and
        set(bad_proxies) != set(self.ProxyListForDev(expected_bad_proxies))):
      raise ChromeProxyMetricException(
          'Wrong bad proxies (%s). Expect: "%s"' % (
              str(bad_proxies), str(expected_bad_proxies)))

  def AddResultsForHTTPFallback(
      self, tab, results, expected_proxies=None, expected_bad_proxies=None):
    """Verify fallback to the HTTP proxy (default expectation: fallback+direct)."""
    if not expected_proxies:
      expected_proxies = [self.effective_proxies['fallback'],
                          self.effective_proxies['direct']]
    if not expected_bad_proxies:
      expected_bad_proxies = []

    self.VerifyProxyInfo(tab, expected_proxies, expected_bad_proxies)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'http_fallback', 'boolean', True))

  def AddResultsForHTTPToDirectFallback(self, tab, results):
    """Verify fallback from the proxies to a direct connection."""
    self.VerifyAllProxiesBypassed(tab)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'direct_fallback', 'boolean', True))

  def AddResultsForExplicitBypass(self, tab, results, expected_bad_proxies):
    """Verify results for an explicit bypass test.

    Args:
        tab: the tab for the test.
        results: the results object to add the results values to.
        expected_bad_proxies: A list of dictionary objects representing
            expected bad proxies and their expected retry time windows.
            See the definition of VerifyBadProxies for details.
    """
    info = GetProxyInfoFromNetworkInternals(tab)
    if not 'enabled' in info or not info['enabled']:
      raise ChromeProxyMetricException(
          'Chrome proxy should be enabled. proxy info: %s' % info)
    self.VerifyBadProxies(info['badProxies'],
                          expected_bad_proxies)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'explicit_bypass', 'boolean', True))